diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..a41da18 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,29 @@ +name: CI + +on: + push: + branches: [main] + pull_request: + +jobs: + test: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + python -m pip install -e .[dev] + + - name: Lint + run: make lint + + - name: Run tests + run: make test diff --git a/.gitignore b/.gitignore index 9308a4b..d40d7cf 100644 --- a/.gitignore +++ b/.gitignore @@ -258,6 +258,17 @@ pythontex-files-*/ # easy-todo *.lod +# MiniChain local planning docs (do not commit) +issues.md +architectureProposal.md + +# Python caches and virtualenvs +__pycache__/ +*.py[cod] +.pytest_cache/ +.ruff_cache/ +.venv/ + # xcolor *.xcp @@ -324,3 +335,6 @@ TSWLatexianTemp* # option is specified. Footnotes are the stored in a file with suffix Notes.bib. # Uncomment the next line to have this generated file ignored. #*Notes.bib + + +docs/ diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..a99220a --- /dev/null +++ b/Makefile @@ -0,0 +1,21 @@ +PYTHON ?= python3 + +.PHONY: install dev-install test lint format start-node + +install: + $(PYTHON) -m pip install . + +dev-install: + $(PYTHON) -m pip install -e .[dev] + +test: + $(PYTHON) -m pytest + +lint: + $(PYTHON) -m ruff check src tests + +format: + $(PYTHON) -m ruff format src tests + +start-node: + PYTHONPATH=src $(PYTHON) -m minichain --host 127.0.0.1 --port 7000 diff --git a/doc/implementation.md b/doc/implementation.md new file mode 100644 index 0000000..78cbd82 --- /dev/null +++ b/doc/implementation.md @@ -0,0 +1,105 @@ +# MiniChain Implementation Reference + +This document summarizes the current MiniChain v0 implementation by subsystem. 
+ +## 1) Crypto and Identity (`src/minichain/crypto.py`) + +- Ed25519 key generation/sign/verify through PyNaCl. +- Address derivation from public key digest (20-byte lowercase hex address). +- Serialization helpers for private/public keys. + +## 2) Canonical Serialization (`src/minichain/serialization.py`) + +- Deterministic JSON serialization for consensus-critical payloads. +- Strict field presence/order checks to avoid cross-node divergence. + +## 3) Transactions (`src/minichain/transaction.py`) + +- Account-model transfer transaction with: sender, recipient, amount, nonce, fee, timestamp. +- Signature + public key included for validation. +- Coinbase transaction shape and validation helpers. +- Deterministic transaction id generation. + +## 4) Merkle and Blocks (`src/minichain/merkle.py`, `src/minichain/block.py`) + +- Deterministic Merkle root construction for block transaction lists. +- Block header contains parent hash, height, timestamp, target, nonce, root. +- Block hash = canonical header hash. +- Header/body consistency checks for Merkle root. + +## 5) Consensus and Mining (`src/minichain/consensus.py`, `src/minichain/mining.py`) + +- Proof-of-work target validation and nonce search. +- Difficulty bounds and retarget policy. +- Candidate block building from mempool with coinbase reward + fees. +- Interrupt-capable mining loop support. + +## 6) State and Chain Logic (`src/minichain/state.py`, `src/minichain/chain.py`, `src/minichain/genesis.py`) + +- Account state tracks balance and nonce per address. +- Deterministic tx apply rules: nonce progression, amount+fee affordability, signer identity. +- Canonical chain selection by valid longest chain. +- Reorg support with state replay and validation. +- Configurable genesis creation and initial state. + +## 7) Mempool (`src/minichain/mempool.py`) + +- Signature/identity checks before acceptance. +- Per-sender nonce queueing (ready vs waiting). 
+- Mining selection prioritizes fee while preserving sender nonce order. +- Eviction by age and capacity. + +## 8) Persistence (`src/minichain/storage.py`) + +- SQLite storage for blocks, chain metadata, and state snapshots. +- Canonical head persistence and restart recovery. +- Replay + snapshot consistency checks on startup. + +## 9) Node Orchestration (`src/minichain/node.py`) + +- `MiniChainNode` composes chain manager, mempool, and storage. +- APIs for transaction submission, block acceptance, and mining. +- Clean startup/shutdown lifecycle with config validation. + +## 10) Networking (`src/minichain/network.py`) + +- TCP handshake (`hello`) and peer management. +- Peer discovery via bootstrap and mDNS/local discovery fallback. +- Gossip protocols: + - `/minichain/tx/1.0.0` + - `/minichain/block/1.0.0` +- Sync protocol (`/minichain/sync/1.0.0`) with range requests. +- Block/tx dedup caches. +- Automatic reconnect loop to bootstrap/known peers. +- Transaction gossip returns explicit `tx_result` ack (accepted/reason). + +## 11) CLI (`src/minichain/__main__.py`) + +Primary command groups: + +- `node start` +- `node run [--peer host:port] [--mine]` +- `node stop` +- `wallet generate-key|balance|details|list` +- `tx submit` +- `chain info|block|accounts` +- `mine` +- `shell` + +Current daemon-aware behavior: + +- If `node run` is active for the same `--data-dir`, `tx submit` routes over network to the running daemon. +- `chain info` includes `connected_peers`. +- Daemon status logging includes `height`, `tip`, `mempool_size`, `connected_peers`. + +## 12) Test Coverage (`tests/`) + +- Unit tests for core primitives and validation logic. +- Integration tests for: + - peer discovery + - tx gossip + - block propagation + - chain sync/catch-up + - fork/reorg and convergence scenarios + +MiniChain v0 currently provides a complete educational blockchain node with deterministic behavior, CLI-driven flows, and reproducible local/multi-node testing. 
diff --git a/doc/testing.md b/doc/testing.md new file mode 100644 index 0000000..d9502f8 --- /dev/null +++ b/doc/testing.md @@ -0,0 +1,207 @@ +# MiniChain End-to-End Testing + +This document is the single source for testing MiniChain end-to-end: +- single-device flow with live logs +- two-node sync on one machine (multi-device simulation) +- true multi-device LAN sync + +## 1) Prerequisites + +```bash +source .venv/bin/activate +python -m pip install -e . +``` + +Optional clean start: + +```bash +rm -rf .demo-single .demo-a .demo-b +``` + + +## 3) Single-Device End-to-End + +Use two terminals. + +### Terminal A: create keys and run node daemon + +```bash +MINER_OUT=$(minichain --data-dir .demo-single wallet generate-key) +RECIP_OUT=$(minichain --data-dir .demo-single wallet generate-key) + +MINER_ADDR=$(echo "$MINER_OUT" | awk -F= '/address/{gsub(/ /,"",$2); print $2; exit}') +MINER_PK=$(echo "$MINER_OUT" | awk -F= '/private_key/{gsub(/ /,"",$2); print $2; exit}') +RECIP_ADDR=$(echo "$RECIP_OUT" | awk -F= '/address/{gsub(/ /,"",$2); print $2; exit}') + +minichain --data-dir .demo-single --host 127.0.0.1 --port 7000 --miner-address "$MINER_ADDR" \ + node run --advertise-host 127.0.0.1 --mine --mine-interval-seconds 10 --status-interval-seconds 2 +``` + +### Terminal B: verify chain, submit tx, verify balances + + +```bash +minichain --data-dir .demo-single chain info + +minichain --data-dir .demo-single --host 127.0.0.1 --port 7000 tx submit \ + --private-key "$MINER_PK" \ + --recipient "$RECIP_ADDR" \ + --amount 5 \ + --fee 1 \ + --no-mine-now +``` + +Wait for the next mined block, then: + +```bash +minichain --data-dir .demo-single wallet balance --address "$MINER_ADDR" +minichain --data-dir .demo-single wallet balance --address "$RECIP_ADDR" +minichain --data-dir .demo-single chain accounts --limit 20 +``` + +Stop daemon: + +```bash +minichain --data-dir .demo-single node stop +``` + +## 4) Two Nodes on One Machine (Simulate Multi-Device) + +Use two terminals. 
+ +### Terminal A (miner node) + +```bash +MINER_OUT=$(minichain --data-dir .demo-a wallet generate-key) +MINER_ADDR=$(echo "$MINER_OUT" | awk -F= '/address/{gsub(/ /,"",$2); print $2; exit}') +MINER_PK=$(echo "$MINER_OUT" | awk -F= '/private_key/{gsub(/ /,"",$2); print $2; exit}') + +minichain --data-dir .demo-a --host 127.0.0.1 --port 7000 --miner-address "$MINER_ADDR" \ + node run --advertise-host 127.0.0.1 --mine --mine-interval-seconds 15 --status-interval-seconds 2 +``` + +### Terminal B (observer node) + +```bash +RECIP_OUT=$(minichain --data-dir .demo-b wallet generate-key) +RECIP_ADDR=$(echo "$RECIP_OUT" | awk -F= '/address/{gsub(/ /,"",$2); print $2; exit}') + +minichain --data-dir .demo-b --host 127.0.0.1 --port 7001 \ + node run --advertise-host 127.0.0.1 --peer 127.0.0.1:7000 --status-interval-seconds 2 +``` + +### Terminal C (tx submit) + + + +Submit tx to node A: + +```bash +minichain --data-dir .demo-a --host 127.0.0.1 --port 7000 tx submit \ + --private-key "$MINER_PK" \ + --recipient "$RECIP_ADDR" \ + --amount 7 \ + --fee 1 \ + --no-mine-now +``` + +After next mined block on A, verify sync: + +```bash +minichain --data-dir .demo-a chain info +minichain --data-dir .demo-b chain info +minichain --data-dir .demo-a wallet balance --address "$MINER_ADDR" +minichain --data-dir .demo-a wallet balance --address "$RECIP_ADDR" +minichain --data-dir .demo-b wallet balance --address "$MINER_ADDR" +minichain --data-dir .demo-b wallet balance --address "$RECIP_ADDR" +``` + +Stop: + +```bash +minichain --data-dir .demo-a node stop +minichain --data-dir .demo-b node stop +``` + +## 5) True Multi-Device LAN Test + +Assume: +- Device A IP: `` +- Device B IP: `` + +### Device A (miner) + +```bash +minichain --data-dir .demo-a --host 0.0.0.0 --port 7000 --miner-address "$MINER_ADDR" \ + node run --advertise-host --mine --mine-interval-seconds 15 --status-interval-seconds 2 +``` + +### Device B (observer) + +```bash +minichain --data-dir .demo-b --host 0.0.0.0 --port 
7001 \ + node run --advertise-host --peer :7000 --status-interval-seconds 2 +``` + +## 6) Complete CLI Command Reference + +Top-level: + +```bash +minichain [--host HOST] [--port PORT] [--data-dir DATA_DIR] [--miner-address MINER_ADDRESS] +``` + +Commands: + +```bash +minichain node start +minichain node run [--peer HOST:PORT] [--advertise-host HOST] [--node-id ID] [--mdns] [--mine] [--mine-interval-seconds N] [--sync-batch-size N] [--status-interval-seconds N] +minichain node stop [--timeout-seconds N] [--force] + +minichain wallet generate-key +minichain wallet balance --address <20-byte-lowercase-hex> +minichain wallet details --address <20-byte-lowercase-hex> +minichain wallet list [--limit N] + +minichain tx submit --private-key --recipient <20-byte-lowercase-hex> --amount N [--fee N] [--nonce N] [--mine-now|--no-mine-now] + +minichain chain info +minichain chain block (--height N | --hash ) +minichain chain accounts [--limit N] + +minichain mine [--count N] [--max-transactions N] +minichain shell +``` + +Useful help commands: + +```bash +minichain --help +minichain node --help +minichain node run --help +minichain node stop --help +minichain wallet --help +minichain wallet balance --help +minichain wallet details --help +minichain wallet list --help +minichain tx --help +minichain tx submit --help +minichain chain --help +minichain chain block --help +minichain chain accounts --help +minichain mine --help +``` + +Legacy aliases (still accepted and auto-remapped): + +```bash +start +generate-key +balance +submit-tx +chain-info +block +``` + + + diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..0ab1b89 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,40 @@ +[build-system] +requires = ["hatchling>=1.24"] +build-backend = "hatchling.build" + +[project] +name = "minichain" +version = "0.1.0" +description = "Minimal, research-oriented blockchain in Python" +readme = "README.md" +requires-python = ">=3.11" +authors = [{ name = "MiniChain 
Contributors" }] +dependencies = [ + "PyNaCl>=1.5.0", +] + +[project.optional-dependencies] +network = [ + "py-libp2p>=0.2.0", +] +dev = [ + "pytest>=8.0", + "ruff>=0.7.0", +] + +[project.scripts] +minichain = "minichain.__main__:main" +minichain-node = "minichain.__main__:main" + +[tool.pytest.ini_options] +minversion = "8.0" +addopts = "-q" +testpaths = ["tests"] +pythonpath = ["src"] + +[tool.ruff] +line-length = 100 +target-version = "py311" + +[tool.ruff.lint] +select = ["E", "F", "I"] diff --git a/src/minichain/__init__.py b/src/minichain/__init__.py new file mode 100644 index 0000000..3bc48b2 --- /dev/null +++ b/src/minichain/__init__.py @@ -0,0 +1,4 @@ +"""MiniChain package.""" + +__all__ = ["__version__"] +__version__ = "0.1.0" diff --git a/src/minichain/__main__.py b/src/minichain/__main__.py new file mode 100644 index 0000000..044b37a --- /dev/null +++ b/src/minichain/__main__.py @@ -0,0 +1,1302 @@ +"""CLI entrypoint for running and interacting with MiniChain.""" + +from __future__ import annotations + +import argparse +import asyncio +import contextlib +import json +import os +import shlex +import signal +import textwrap +import time +from dataclasses import asdict +from pathlib import Path + +from minichain.crypto import ( + derive_address, + deserialize_signing_key, + generate_key_pair, + serialize_signing_key, + serialize_verify_key, +) +from minichain.network import ( + TX_GOSSIP_PROTOCOL_ID, + MiniChainNetwork, + NetworkConfig, + PeerAddress, +) +from minichain.node import MiniChainNode, NodeConfig, NodeError +from minichain.transaction import ADDRESS_HEX_LENGTH, Transaction + +DEFAULT_DATA_DIR = os.environ.get("MINICHAIN_DATA_DIR", str(Path.home() / ".minichain")) +NODE_PID_FILENAME = "node.pid" +NODE_RUNTIME_STATUS_FILENAME = "node_runtime_status.json" + + +def build_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser( + description="MiniChain CLI.", + formatter_class=argparse.RawTextHelpFormatter, + epilog=textwrap.dedent( + 
"""\ + Command Tree + node start + node run [--peer host:port] [--mine] + node stop [--timeout-seconds N] [--force] + wallet generate-key + wallet balance --address + wallet details --address + wallet list [--limit N] + tx submit --private-key --recipient --amount N --fee N [--nonce N] + chain info + chain block --height | --hash + chain accounts [--limit N] + mine --count N [--max-transactions N] + shell + + Legacy commands still work (auto-remapped): + start, generate-key, balance, submit-tx, chain-info, block + """ + ), + ) + parser.add_argument("--host", default="127.0.0.1", help="Host interface for the node") + parser.add_argument("--port", default=7000, type=int, help="Port for the node") + parser.add_argument( + "--data-dir", + default=DEFAULT_DATA_DIR, + help="Directory for node data (sqlite db, chain state). " + "Default: $MINICHAIN_DATA_DIR or ~/.minichain", + ) + parser.add_argument( + "--miner-address", + default=None, + help="Optional 20-byte lowercase hex address used for mining rewards", + ) + + subparsers = parser.add_subparsers(dest="command") + + _add_node_group(subparsers) + _add_wallet_group(subparsers) + _add_tx_group(subparsers) + _add_chain_group(subparsers) + _add_mine_command(subparsers) + + shell = subparsers.add_parser("shell", help="Run interactive MiniChain CLI shell") + shell.set_defaults(action="shell") + return parser + + +def main(argv: list[str] | None = None) -> None: + parser = build_parser() + normalized_argv = _normalize_cli_tokens(argv) + args = parser.parse_args(normalized_argv) + action = getattr(args, "action", None) + + if action is None: + args.action = "node_start" + action = "node_start" + + if action == "shell": + _run_shell(parser, args) + return + + _execute_action(args) + + +def _add_node_group(subparsers: argparse._SubParsersAction[argparse.ArgumentParser]) -> None: + node = subparsers.add_parser("node", help="Node lifecycle commands") + node_sub = node.add_subparsers(dest="node_command", required=True) + 
node_start = node_sub.add_parser("start", help="Start node and print chain status") + node_start.set_defaults(action="node_start") + + node_run = node_sub.add_parser( + "run", + help="Run long-lived node daemon with networking/sync (Ctrl+C to stop)", + ) + node_run.add_argument( + "--peer", + action="append", + default=[], + help="Bootstrap peer in host:port format (repeat flag to add more peers)", + ) + node_run.add_argument( + "--advertise-host", + default=None, + help="Host/IP advertised to peers (defaults to --host)", + ) + node_run.add_argument( + "--node-id", + default=None, + help="Optional stable node id string for logs/debugging", + ) + node_run.add_argument( + "--mdns", + action="store_true", + help="Enable local mDNS discovery in addition to bootstrap peers", + ) + node_run.add_argument( + "--mine", + action="store_true", + help="Enable continuous mining loop", + ) + node_run.add_argument( + "--mine-interval-seconds", + type=float, + default=2.0, + help="Sleep interval between mined blocks in daemon mode", + ) + node_run.add_argument( + "--sync-batch-size", + type=int, + default=128, + help="Max blocks transferred per sync batch", + ) + node_run.add_argument( + "--status-interval-seconds", + type=float, + default=5.0, + help="Periodic status-log interval in seconds", + ) + node_run.set_defaults(action="node_run") + + node_stop = node_sub.add_parser( + "stop", + help="Stop daemon node for this data directory via PID file", + ) + node_stop.add_argument( + "--timeout-seconds", + type=float, + default=5.0, + help="Graceful stop wait timeout before optional force-stop", + ) + node_stop.add_argument( + "--force", + action="store_true", + help="Send SIGKILL if graceful stop does not finish in timeout", + ) + node_stop.set_defaults(action="node_stop") + + +def _add_wallet_group(subparsers: argparse._SubParsersAction[argparse.ArgumentParser]) -> None: + wallet = subparsers.add_parser("wallet", help="Wallet and account commands") + wallet_sub = 
wallet.add_subparsers(dest="wallet_command", required=True) + + wallet_generate = wallet_sub.add_parser("generate-key", help="Generate a new keypair") + wallet_generate.set_defaults(action="wallet_generate_key") + + wallet_balance = wallet_sub.add_parser("balance", help="Query account balance and nonce") + wallet_balance.add_argument("--address", required=True, help="20-byte lowercase hex address") + wallet_balance.set_defaults(action="wallet_balance") + + wallet_details = wallet_sub.add_parser( + "details", + help="Query account details with existence flag", + ) + wallet_details.add_argument("--address", required=True, help="20-byte lowercase hex address") + wallet_details.set_defaults(action="wallet_details") + + wallet_list = wallet_sub.add_parser("list", help="List known accounts in current state") + wallet_list.add_argument("--limit", type=int, default=100, help="max accounts to print") + wallet_list.set_defaults(action="wallet_list") + + +def _add_tx_group(subparsers: argparse._SubParsersAction[argparse.ArgumentParser]) -> None: + tx = subparsers.add_parser("tx", help="Transaction commands") + tx_sub = tx.add_subparsers(dest="tx_command", required=True) + + tx_submit = tx_sub.add_parser("submit", help="Submit a signed transfer transaction") + _add_submit_tx_args(tx_submit) + tx_submit.set_defaults(action="tx_submit") + + +def _add_chain_group(subparsers: argparse._SubParsersAction[argparse.ArgumentParser]) -> None: + chain = subparsers.add_parser("chain", help="Chain query commands") + chain_sub = chain.add_subparsers(dest="chain_command", required=True) + + chain_info = chain_sub.add_parser("info", help="Query chain height and canonical tip hash") + chain_info.set_defaults(action="chain_info") + + block = chain_sub.add_parser("block", help="Query a block by height or hash") + block_group = block.add_mutually_exclusive_group(required=True) + block_group.add_argument("--height", type=int, help="block height") + block_group.add_argument("--hash", 
dest="block_hash", help="block hash (hex)") + block.set_defaults(action="chain_block") + + accounts = chain_sub.add_parser("accounts", help="List known chain accounts") + accounts.add_argument("--limit", type=int, default=100, help="max accounts to print") + accounts.set_defaults(action="chain_accounts") + + +def _add_mine_command(subparsers: argparse._SubParsersAction[argparse.ArgumentParser]) -> None: + mine = subparsers.add_parser("mine", help="Mine one or more blocks") + mine.add_argument("--count", default=1, type=int, help="number of blocks to mine") + mine.add_argument( + "--max-transactions", + default=None, + type=int, + help="max non-coinbase tx per block", + ) + mine.set_defaults(action="mine") + + +def _add_submit_tx_args(parser: argparse.ArgumentParser) -> None: + parser.add_argument("--private-key", required=True, help="hex-encoded Ed25519 signing key") + parser.add_argument("--recipient", required=True, help="20-byte lowercase hex address") + parser.add_argument("--amount", required=True, type=int, help="transfer amount") + parser.add_argument("--fee", default=1, type=int, help="transaction fee") + parser.add_argument("--nonce", default=None, type=int, help="optional sender nonce") + parser.add_argument( + "--mine-now", + action="store_true", + help="mine one block immediately after submission (default behavior)", + ) + parser.add_argument( + "--no-mine-now", + action="store_false", + dest="mine_now", + help="do not mine immediately after submission", + ) + parser.set_defaults(mine_now=True) + + +def _execute_action(args: argparse.Namespace) -> None: + if args.action == "wallet_generate_key": + _run_generate_key() + return + if args.action == "node_run": + try: + asyncio.run(_run_node_daemon(args)) + except KeyboardInterrupt: + pass + return + if args.action == "node_stop": + _run_node_stop( + data_dir=Path(args.data_dir).expanduser(), + timeout_seconds=args.timeout_seconds, + force=args.force, + ) + return + if args.action == "tx_submit": + data_dir 
= Path(args.data_dir).expanduser() + if _daemon_running_for_data_dir(data_dir): + _run_submit_transaction_via_network( + data_dir=data_dir, + host=str(args.host), + port=int(args.port), + private_key_hex=args.private_key, + recipient=args.recipient, + amount=args.amount, + fee=args.fee, + nonce=args.nonce, + mine_now=args.mine_now, + ) + return + + miner_address = args.miner_address + if args.action == "tx_submit" and args.mine_now and miner_address is None: + miner_address = _infer_sender_from_private_key(args.private_key) + + node = MiniChainNode( + NodeConfig( + data_dir=Path(args.data_dir), + miner_address=miner_address, + ) + ) + node.start() + try: + if args.action == "node_start": + _print_heading("Node Status") + _print_kv( + { + "host": f"{args.host}:{args.port}", + "data_dir": str(Path(args.data_dir).expanduser()), + "chain_height": node.height, + "tip_hash": node.tip_hash, + "known_accounts": len(node.chain_manager.state.accounts), + } + ) + return + + if args.action == "wallet_balance": + _run_balance(node=node, address=args.address) + return + + if args.action == "wallet_details": + _run_wallet_details(node=node, address=args.address) + return + + if args.action == "wallet_list": + _run_wallet_list(node=node, limit=args.limit) + return + + if args.action == "chain_info": + _run_chain_info( + node=node, + data_dir=Path(args.data_dir).expanduser(), + ) + return + + if args.action == "chain_block": + _run_block_query(node=node, height=args.height, block_hash=args.block_hash) + return + + if args.action == "chain_accounts": + _run_wallet_list(node=node, limit=args.limit) + return + + if args.action == "tx_submit": + _run_submit_transaction( + node=node, + private_key_hex=args.private_key, + recipient=args.recipient, + amount=args.amount, + fee=args.fee, + nonce=args.nonce, + mine_now=args.mine_now, + ) + return + + if args.action == "mine": + _run_mine( + node=node, + count=args.count, + max_transactions=args.max_transactions, + ) + return + + raise 
ValueError(f"Unsupported command action: {args.action}") + finally: + node.stop() + + +def _run_shell(parser: argparse.ArgumentParser, base_args: argparse.Namespace) -> None: + print() + print("+--------------------------------------+") + print("| MiniChain Interactive Shell |") + print("+--------------------------------------+") + print(" Type 'help' for commands, 'exit' to quit.") + print() + while True: + try: + line = input("minichain >> ") + except EOFError: + print() + break + + text = line.strip() + if not text: + continue + if text in {"exit", "quit"}: + print(" Goodbye.") + break + if text in {"help", "?"}: + parser.print_help() + continue + + try: + tokens = _normalize_cli_tokens(shlex.split(text)) + shell_args = _shell_defaults(base_args) + tokens + parsed = parser.parse_args(shell_args) + if getattr(parsed, "action", None) == "shell": + print(" [error] Nested shell is not supported.") + continue + if getattr(parsed, "action", None) is None: + parsed.action = "node_start" + _execute_action(parsed) + except SystemExit: + continue + except Exception as exc: + print(f" [error] {exc}") + + +def _parse_peer_addresses(values: list[str]) -> tuple[PeerAddress, ...]: + peers: list[PeerAddress] = [] + for value in values: + peers.append(PeerAddress.from_string(value)) + return tuple(peers) + + +async def _run_node_daemon(args: argparse.Namespace) -> None: + if args.mine and args.miner_address is None: + raise ValueError("--miner-address is required when --mine is enabled") + if args.mine_interval_seconds <= 0: + raise ValueError("mine_interval_seconds must be positive") + if args.status_interval_seconds <= 0: + raise ValueError("status_interval_seconds must be positive") + + data_dir = Path(args.data_dir).expanduser() + bootstrap_peers = _parse_peer_addresses(list(args.peer)) + node = MiniChainNode( + NodeConfig( + data_dir=data_dir, + miner_address=args.miner_address, + ) + ) + node.start() + pid_file: Path | None = None + network: MiniChainNetwork | None = None 
+ background_tasks: list[asyncio.Task[None]] = [] + try: + pid_file = _acquire_node_pid_file(data_dir) + + network = MiniChainNetwork( + NetworkConfig( + host=args.host, + port=args.port, + advertise_host=args.advertise_host, + node_id=args.node_id, + bootstrap_peers=bootstrap_peers, + enable_mdns=args.mdns, + sync_batch_size=args.sync_batch_size, + ) + ) + network.set_transaction_handler(lambda tx: _handle_network_transaction(node, tx)) + network.set_block_handler(lambda block: _handle_network_block(node, block)) + network.set_sync_handlers( + get_height=lambda: node.height, + get_block_by_height=node.chain_manager.get_canonical_block_by_height, + apply_block=lambda block: _handle_network_block(node, block), + ) + + await network.start() + _log( + "info", + "node_started " + f"listen={network.listen_host}:{network.listen_port} " + f"advertise={network.advertise_host}:{network.listen_port} " + f"node_id={network.node_id} data_dir={data_dir}", + ) + _log( + "info", + "consensus_policy=longest_valid_chain_wins " + f"height={node.height} tip={node.tip_hash}", + ) + _log( + "info", + "network_config " + f"mdns={'on' if args.mdns else 'off'} " + f"bootstrap_peers={','.join(args.peer) if args.peer else 'none'} " + f"mining={'on' if args.mine else 'off'}", + ) + + background_tasks = [ + asyncio.create_task( + _status_loop( + node=node, + network=network, + data_dir=data_dir, + interval_seconds=args.status_interval_seconds, + ) + ) + ] + if args.mine: + background_tasks.append( + asyncio.create_task( + _mining_loop( + node=node, + network=network, + interval_seconds=args.mine_interval_seconds, + ) + ) + ) + + while True: + await asyncio.sleep(3600) + finally: + for task in background_tasks: + task.cancel() + await asyncio.gather(*background_tasks, return_exceptions=True) + if network is not None: + await network.stop() + node.stop() + _release_node_pid_file(pid_file) + _clear_runtime_status(data_dir) + _log("info", "node_stopped") + + +async def _status_loop( + *, + node: 
MiniChainNode, + network: MiniChainNetwork, + data_dir: Path, + interval_seconds: float, +) -> None: + while True: + connected = sorted(network.connected_peer_ids()) + _write_runtime_status( + data_dir=data_dir, + height=node.height, + tip_hash=node.tip_hash, + mempool_size=node.mempool.size(), + connected_peers=len(connected), + ) + _log( + "status", + f"height={node.height} tip={node.tip_hash} " + f"mempool_size={node.mempool.size()} connected_peers={len(connected)}", + ) + await asyncio.sleep(interval_seconds) + + +async def _mining_loop( + *, + node: MiniChainNode, + network: MiniChainNetwork, + interval_seconds: float, +) -> None: + while True: + try: + block = node.mine_one_block() + broadcast = await network.submit_block(block, already_applied=True) + _log( + "mine", + f"mined_height={block.header.block_height} hash={block.hash().hex()} " + f"broadcasted={'true' if broadcast else 'false'}", + ) + except NodeError as exc: + _log("warn", f"mining_error={exc}") + await asyncio.sleep(interval_seconds) + + +def _handle_network_transaction(node: MiniChainNode, transaction: Transaction) -> bool: + try: + tx_id = node.submit_transaction(transaction) + except NodeError as exc: + _log("warn", f"tx_rejected error={exc}") + return False + _log("net", f"tx_accepted tx_id={tx_id}") + return True + + +def _handle_network_block(node: MiniChainNode, block) -> bool: + try: + result = node.accept_block(block) + except NodeError as exc: + _log("warn", f"block_rejected error={exc}") + return False + + accepted = result in {"extended", "reorg", "stored_fork", "duplicate"} + if result in {"extended", "reorg"}: + _log( + "net", + f"block_{result} height={node.height} " + f"tip={node.tip_hash} hash={block.hash().hex()}", + ) + elif result == "stored_fork": + _log("net", f"block_stored_fork hash={block.hash().hex()}") + return accepted + + +def _run_node_stop(*, data_dir: Path, timeout_seconds: float, force: bool) -> None: + if timeout_seconds <= 0: + raise 
ValueError("timeout_seconds must be positive") + + pid_file = _node_pid_path(data_dir) + _print_heading("Node Stop") + if not pid_file.exists(): + _print_kv( + { + "status": "not_running", + "data_dir": str(data_dir), + "pid_file": str(pid_file), + } + ) + return + + pid = _read_pid_file(pid_file) + if pid is None: + _release_node_pid_file(pid_file) + _print_kv( + { + "status": "not_running", + "reason": "invalid_pid_file_removed", + "pid_file": str(pid_file), + } + ) + return + + if not _process_exists(pid): + _release_node_pid_file(pid_file) + _print_kv( + { + "status": "not_running", + "reason": "stale_pid_file_removed", + "stale_pid": pid, + } + ) + return + + graceful_signal = signal.SIGINT if os.name != "nt" else signal.SIGTERM + try: + os.kill(pid, graceful_signal) + except ProcessLookupError: + _release_node_pid_file(pid_file) + _print_kv( + { + "status": "not_running", + "reason": "process_exited_before_signal", + "stale_pid": pid, + } + ) + return + if _wait_for_exit(pid, timeout_seconds=timeout_seconds): + _release_node_pid_file(pid_file) + _print_kv( + { + "status": "stopped", + "pid": pid, + "signal": graceful_signal.name, + "forced": "false", + } + ) + return + + if not force: + raise ValueError( + f"failed to stop pid {pid} within {timeout_seconds:.1f}s; retry with node stop --force" + ) + if not hasattr(signal, "SIGKILL"): + raise ValueError( + f"failed to stop pid {pid} within {timeout_seconds:.1f}s; force stop unavailable" + ) + + try: + os.kill(pid, signal.SIGKILL) + except ProcessLookupError: + _release_node_pid_file(pid_file) + _print_kv( + { + "status": "not_running", + "reason": "process_exited_before_force_signal", + "stale_pid": pid, + } + ) + return + if _wait_for_exit(pid, timeout_seconds=2.0): + _release_node_pid_file(pid_file) + _print_kv( + { + "status": "stopped", + "pid": pid, + "signal": signal.SIGKILL.name, + "forced": "true", + } + ) + return + raise ValueError(f"failed to force-stop pid {pid}") + + +def _shell_defaults(base_args: 
argparse.Namespace) -> list[str]: + defaults = [ + "--host", + str(base_args.host), + "--port", + str(base_args.port), + "--data-dir", + str(base_args.data_dir), + ] + if base_args.miner_address is not None: + defaults.extend(["--miner-address", str(base_args.miner_address)]) + return defaults + + +def _log(level: str, message: str) -> None: + timestamp = time.strftime("%Y-%m-%d %H:%M:%S") + tag = level.upper().center(8) + print(f"[{timestamp}] [{tag}] {message}") + + +def _acquire_node_pid_file(data_dir: Path) -> Path: + data_dir.mkdir(parents=True, exist_ok=True) + pid_file = _node_pid_path(data_dir) + existing_pid = _read_pid_file(pid_file) + if existing_pid is not None and _process_exists(existing_pid): + raise ValueError( + f"node already running for data_dir={data_dir} with pid={existing_pid}; " + "use `minichain --data-dir node stop` first" + ) + if pid_file.exists(): + pid_file.unlink() + + flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY + try: + fd = os.open(str(pid_file), flags, 0o644) + except FileExistsError as exc: + race_pid = _read_pid_file(pid_file) + if race_pid is not None and _process_exists(race_pid): + raise ValueError( + f"node already running for data_dir={data_dir} with pid={race_pid}; " + "use `minichain --data-dir node stop` first" + ) from exc + raise ValueError(f"failed to acquire pid lock at {pid_file}") from exc + with os.fdopen(fd, "w", encoding="utf-8") as handle: + handle.write(f"{os.getpid()}\n") + return pid_file + + +def _release_node_pid_file(pid_file: Path | None) -> None: + if pid_file is None: + return + pid_file.unlink(missing_ok=True) + + +def _node_pid_path(data_dir: Path) -> Path: + return data_dir / NODE_PID_FILENAME + + +def _node_runtime_status_path(data_dir: Path) -> Path: + return data_dir / NODE_RUNTIME_STATUS_FILENAME + + +def _write_runtime_status( + *, + data_dir: Path, + height: int, + tip_hash: str, + mempool_size: int, + connected_peers: int, +) -> None: + payload = { + "updated_at": int(time.time()), + "height": 
height, + "tip_hash": tip_hash, + "mempool_size": mempool_size, + "connected_peers": connected_peers, + } + status_path = _node_runtime_status_path(data_dir) + tmp_path = status_path.with_suffix(".tmp") + with contextlib.suppress(OSError): + tmp_path.write_text(json.dumps(payload, sort_keys=True), encoding="utf-8") + tmp_path.replace(status_path) + + +def _read_runtime_status(data_dir: Path) -> dict[str, object] | None: + status_path = _node_runtime_status_path(data_dir) + if not status_path.exists(): + return None + try: + raw = status_path.read_text(encoding="utf-8") + except OSError: + return None + try: + parsed = json.loads(raw) + except json.JSONDecodeError: + return None + if not isinstance(parsed, dict): + return None + return parsed + + +def _clear_runtime_status(data_dir: Path) -> None: + _node_runtime_status_path(data_dir).unlink(missing_ok=True) + + +def _daemon_running_for_data_dir(data_dir: Path) -> bool: + pid_file = _node_pid_path(data_dir) + pid = _read_pid_file(pid_file) + if pid is None: + return False + return _process_exists(pid) + + +def _read_pid_file(pid_file: Path) -> int | None: + if not pid_file.exists(): + return None + content = pid_file.read_text(encoding="utf-8").strip() + if not content: + return None + try: + pid = int(content) + except ValueError: + return None + if pid <= 0: + return None + return pid + + +def _process_exists(pid: int) -> bool: + if pid <= 0: + return False + try: + os.kill(pid, 0) + except ProcessLookupError: + return False + except PermissionError: + return True + return True + + +def _wait_for_exit(pid: int, *, timeout_seconds: float) -> bool: + deadline = time.monotonic() + timeout_seconds + while time.monotonic() < deadline: + if not _process_exists(pid): + return True + time.sleep(0.05) + return not _process_exists(pid) + + +def _normalize_cli_tokens(argv: list[str] | None) -> list[str] | None: + if argv is None: + return None + tokens = ["--help" if token == "-help" else token for token in argv] + if not 
tokens: + return tokens + + command_index = _find_command_index(tokens) + if command_index < 0: + return tokens + + command = tokens[command_index] + legacy_map: dict[str, list[str]] = { + "start": ["node", "start"], + "generate-key": ["wallet", "generate-key"], + "balance": ["wallet", "balance"], + "submit-tx": ["tx", "submit"], + "chain-info": ["chain", "info"], + "block": ["chain", "block"], + "mine-legacy": ["mine"], + } + replacement = legacy_map.get(command) + if replacement is None: + return tokens + return tokens[:command_index] + replacement + tokens[command_index + 1 :] + + +def _find_command_index(tokens: list[str]) -> int: + options_with_values = {"--host", "--port", "--data-dir", "--miner-address"} + index = 0 + while index < len(tokens): + token = tokens[index] + if token in options_with_values: + index += 2 + continue + if token.startswith("-"): + index += 1 + continue + return index + return -1 + + +def _run_generate_key() -> None: + signing_key, verify_key = generate_key_pair() + private_key = serialize_signing_key(signing_key) + public_key = serialize_verify_key(verify_key) + address = derive_address(verify_key) + _print_heading("Wallet Key Material") + _print_kv( + { + "private_key": private_key, + "public_key": public_key, + "address": address, + } + ) + + +def _run_balance(*, node: MiniChainNode, address: str) -> None: + if not _is_lower_hex(address, ADDRESS_HEX_LENGTH): + raise ValueError("address must be a 20-byte lowercase hex string") + account = node.chain_manager.state.get_account(address) + _print_heading("Wallet Balance") + _print_kv({"address": address, "balance": account.balance, "nonce": account.nonce}) + + +def _run_wallet_details(*, node: MiniChainNode, address: str) -> None: + if not _is_lower_hex(address, ADDRESS_HEX_LENGTH): + raise ValueError("address must be a 20-byte lowercase hex string") + exists = address in node.chain_manager.state.accounts + account = node.chain_manager.state.get_account(address) + _print_heading("Wallet 
Details") + _print_kv( + { + "address": address, + "exists": "true" if exists else "false", + "balance": account.balance, + "nonce": account.nonce, + } + ) + + +def _run_wallet_list(*, node: MiniChainNode, limit: int) -> None: + if limit <= 0: + raise ValueError("limit must be positive") + accounts = sorted( + node.chain_manager.state.accounts.items(), + key=lambda item: item[0], + ) + _print_heading("Wallet Accounts") + _print_kv( + { + "total_accounts": len(accounts), + "showing": min(limit, len(accounts)), + } + ) + if accounts[:limit]: + print(f" {'#':>4} {'Address':<42} {'Balance':>12} {'Nonce':>6}") + print( + f" {'----':>4} {'------------------------------------------':<42} " + f"{'------------':>12} {'------':>6}" + ) + for index, (address, account) in enumerate(accounts[:limit], start=1): + print(f" {index:>4} {address:<42} {account.balance:>12} {account.nonce:>6}") + print() + + +def _run_chain_info(*, node: MiniChainNode, data_dir: Path) -> None: + total_supply = sum(account.balance for account in node.chain_manager.state.accounts.values()) + connected_peers = 0 + if _daemon_running_for_data_dir(data_dir): + runtime_status = _read_runtime_status(data_dir) + if runtime_status is not None: + value = runtime_status.get("connected_peers") + if isinstance(value, int) and value >= 0: + connected_peers = value + _print_heading("Chain Info") + _print_kv( + { + "height": node.height, + "tip_hash": node.tip_hash, + "accounts": len(node.chain_manager.state.accounts), + "total_supply": total_supply, + "connected_peers": connected_peers, + } + ) + + +def _run_block_query( + *, + node: MiniChainNode, + height: int | None, + block_hash: str | None, +) -> None: + if height is not None: + block = node.storage.get_block_by_height(height) + else: + if block_hash is None: + raise ValueError("block hash is required") + block = node.storage.get_block_by_hash(block_hash) + + if block is None: + print("block_not_found") + return + + payload = { + "hash": block.hash().hex(), + 
"header": asdict(block.header), + "transactions": [asdict(transaction) for transaction in block.transactions], + } + _print_heading("Block") + print(json.dumps(payload, sort_keys=True, indent=2)) + + +def _run_submit_transaction( + *, + node: MiniChainNode, + private_key_hex: str, + recipient: str, + amount: int, + fee: int, + nonce: int | None, + mine_now: bool, +) -> None: + if amount < 0: + raise ValueError("amount must be non-negative") + if fee < 0: + raise ValueError("fee must be non-negative") + if not _is_lower_hex(recipient, ADDRESS_HEX_LENGTH): + raise ValueError("recipient must be a 20-byte lowercase hex string") + + signing_key = deserialize_signing_key(private_key_hex) + sender_address = derive_address(signing_key.verify_key) + sender_account = node.chain_manager.state.get_account(sender_address) + resolved_nonce = sender_account.nonce if nonce is None else nonce + if resolved_nonce < 0: + raise ValueError("nonce must be non-negative") + + transaction = Transaction( + sender=sender_address, + recipient=recipient, + amount=amount, + nonce=resolved_nonce, + fee=fee, + timestamp=int(time.time()), + ) + transaction.sign(signing_key) + + transaction_id = node.submit_transaction(transaction) + _print_heading("Transaction Submitted") + _print_kv( + { + "submitted_tx_id": transaction_id, + "sender": sender_address, + "recipient": recipient, + "amount": amount, + "fee": fee, + "nonce": resolved_nonce, + } + ) + + if not mine_now: + _print_kv({"queued_in_mempool": "true"}) + return + + mined_block = node.mine_one_block() + _print_kv( + { + "mined_block_height": mined_block.header.block_height, + "mined_block_hash": mined_block.hash().hex(), + } + ) + + +def _run_submit_transaction_via_network( + *, + data_dir: Path, + host: str, + port: int, + private_key_hex: str, + recipient: str, + amount: int, + fee: int, + nonce: int | None, + mine_now: bool, +) -> None: + if amount < 0: + raise ValueError("amount must be non-negative") + if fee < 0: + raise ValueError("fee 
must be non-negative") + if not _is_lower_hex(recipient, ADDRESS_HEX_LENGTH): + raise ValueError("recipient must be a 20-byte lowercase hex string") + + signing_key = deserialize_signing_key(private_key_hex) + sender_address = derive_address(signing_key.verify_key) + resolved_nonce = ( + _infer_sender_nonce_from_data_dir(data_dir=data_dir, sender_address=sender_address) + if nonce is None + else nonce + ) + if resolved_nonce < 0: + raise ValueError("nonce must be non-negative") + + transaction = Transaction( + sender=sender_address, + recipient=recipient, + amount=amount, + nonce=resolved_nonce, + fee=fee, + timestamp=int(time.time()), + ) + transaction.sign(signing_key) + + accepted, reason = asyncio.run( + _submit_transaction_to_peer( + transaction=transaction, + host=host, + port=port, + timeout_seconds=3.0, + ) + ) + if not accepted: + detail = reason or "unknown rejection" + raise ValueError(f"Transaction rejected by running node at {host}:{port}: {detail}") + + _print_heading("Transaction Submitted") + _print_kv( + { + "submitted_tx_id": transaction.transaction_id().hex(), + "sender": sender_address, + "recipient": recipient, + "amount": amount, + "fee": fee, + "nonce": resolved_nonce, + "submitted_via": "network", + "peer": f"{host}:{port}", + "queued_in_mempool": "true", + } + ) + if mine_now: + _print_kv({"note": "daemon mode active; block inclusion depends on node run --mine"}) + + +def _infer_sender_nonce_from_data_dir(*, data_dir: Path, sender_address: str) -> int: + node = MiniChainNode(NodeConfig(data_dir=data_dir)) + try: + node.start() + return node.chain_manager.state.get_account(sender_address).nonce + except Exception as exc: + raise ValueError( + "unable to infer sender nonce from local state while daemon is running; " + "pass --nonce explicitly" + ) from exc + finally: + with contextlib.suppress(Exception): + node.stop() + + +async def _submit_transaction_to_peer( + *, + transaction: Transaction, + host: str, + port: int, + timeout_seconds: 
float, +) -> tuple[bool, str]: + if timeout_seconds <= 0: + raise ValueError("timeout_seconds must be positive") + peer = PeerAddress(host=host, port=port) + peer.validate() + + reader, writer = await asyncio.wait_for( + asyncio.open_connection(host, port), + timeout=timeout_seconds, + ) + try: + await _write_json_line( + writer, + { + "type": "hello", + "node_id": f"cli-submit-{os.getpid()}-{int(time.time())}", + "host": "127.0.0.1", + "port": 0, + }, + ) + await _wait_for_peer_hello(reader=reader, timeout_seconds=timeout_seconds) + + tx_id = transaction.transaction_id().hex() + await _write_json_line( + writer, + { + "type": "tx_gossip", + "protocol": TX_GOSSIP_PROTOCOL_ID, + "transaction_id": tx_id, + "transaction": asdict(transaction), + }, + ) + return await _wait_for_tx_result( + reader=reader, + expected_transaction_id=tx_id, + timeout_seconds=timeout_seconds, + ) + finally: + writer.close() + with contextlib.suppress(Exception): + await writer.wait_closed() + + +async def _wait_for_peer_hello(*, reader: asyncio.StreamReader, timeout_seconds: float) -> None: + deadline = asyncio.get_running_loop().time() + timeout_seconds + while asyncio.get_running_loop().time() < deadline: + remaining = max(0.01, deadline - asyncio.get_running_loop().time()) + message = await _read_json_line( + reader=reader, + timeout_seconds=remaining, + eof_ok=True, + ) + if message is None: + break + if message.get("type") == "hello": + return + raise TimeoutError("timed out waiting for node handshake response") + + +async def _wait_for_tx_result( + *, + reader: asyncio.StreamReader, + expected_transaction_id: str, + timeout_seconds: float, +) -> tuple[bool, str]: + deadline = asyncio.get_running_loop().time() + timeout_seconds + while asyncio.get_running_loop().time() < deadline: + remaining = max(0.01, deadline - asyncio.get_running_loop().time()) + message = await _read_json_line( + reader=reader, + timeout_seconds=remaining, + eof_ok=True, + ) + if message is None: + break + if 
message.get("type") != "tx_result": + continue + transaction_id = message.get("transaction_id") + if transaction_id != expected_transaction_id: + continue + accepted = bool(message.get("accepted")) + reason = message.get("reason") + if not isinstance(reason, str): + reason = "unknown" + return accepted, reason + raise TimeoutError("timed out waiting for transaction result from node") + + +async def _read_json_line( + *, + reader: asyncio.StreamReader, + timeout_seconds: float, + eof_ok: bool, +) -> dict[str, object] | None: + line = await asyncio.wait_for(reader.readline(), timeout=timeout_seconds) + if not line: + if eof_ok: + return None + raise ValueError("unexpected EOF while reading response") + payload = json.loads(line.decode("utf-8")) + if not isinstance(payload, dict): + raise ValueError("response payload must be a JSON object") + return payload + + +async def _write_json_line(writer: asyncio.StreamWriter, payload: dict[str, object]) -> None: + writer.write(json.dumps(payload, sort_keys=True, separators=(",", ":")).encode("utf-8") + b"\n") + await writer.drain() + + +def _run_mine( + *, + node: MiniChainNode, + count: int, + max_transactions: int | None, +) -> None: + if count <= 0: + raise ValueError("count must be positive") + _print_heading("Mining") + for index in range(1, count + 1): + block = node.mine_one_block(max_transactions=max_transactions) + print( + f" [{index}/{count}] " + f"mined_block_{index}=height:{block.header.block_height},hash:{block.hash().hex()}" + ) + print(f"\n Done. 
Mined {count} block(s).") + print() + + +def _infer_sender_from_private_key(private_key_hex: str) -> str: + signing_key = deserialize_signing_key(private_key_hex) + return derive_address(signing_key.verify_key) + + +def _is_lower_hex(value: str, expected_length: int) -> bool: + if len(value) != expected_length: + return False + return all(ch in "0123456789abcdef" for ch in value) + + +def _print_heading(title: str) -> None: + width = max(len(title) + 4, 40) + border = "+" + "-" * (width - 2) + "+" + print() + print(border) + print("| " + title.center(width - 4) + " |") + print(border) + + +def _print_kv(values: dict[str, object]) -> None: + if not values: + return + max_key_len = max(len(str(k)) for k in values) + for key, value in values.items(): + print(f" {str(key).ljust(max_key_len)} = {value}") + print() + + +if __name__ == "__main__": + main() diff --git a/src/minichain/block.py b/src/minichain/block.py new file mode 100644 index 0000000..01f8a1e --- /dev/null +++ b/src/minichain/block.py @@ -0,0 +1,84 @@ +"""Block primitives and block-level validation logic.""" + +from __future__ import annotations + +from dataclasses import dataclass, field + +from minichain.crypto import blake2b_digest +from minichain.merkle import compute_merkle_root +from minichain.serialization import serialize_block_header +from minichain.transaction import Transaction + + +class BlockValidationError(ValueError): + """Raised when a block fails structural or semantic validation.""" + + +@dataclass +class BlockHeader: + """Consensus-critical block header.""" + + version: int + previous_hash: str + merkle_root: str + timestamp: int + difficulty_target: int + nonce: int + block_height: int + + def hash(self) -> bytes: + """Compute the canonical block-header hash.""" + return blake2b_digest(serialize_block_header(self)) + + def hash_hex(self) -> str: + return self.hash().hex() + + +@dataclass +class Block: + """A block containing a header and ordered transactions.""" + + header: BlockHeader 
+ transactions: list[Transaction] = field(default_factory=list) + + def transaction_hashes(self) -> list[bytes]: + return [tx.transaction_id() for tx in self.transactions] + + def computed_merkle_root(self) -> bytes: + return compute_merkle_root(self.transaction_hashes()) + + def computed_merkle_root_hex(self) -> str: + return self.computed_merkle_root().hex() + + def update_header_merkle_root(self) -> None: + self.header.merkle_root = self.computed_merkle_root_hex() + + def has_valid_merkle_root(self) -> bool: + return self.header.merkle_root == self.computed_merkle_root_hex() + + def validate_coinbase(self, *, block_reward: int) -> None: + """Validate coinbase placement and reward accounting.""" + if block_reward < 0: + raise BlockValidationError("block_reward must be non-negative") + if not self.transactions: + raise BlockValidationError("Block must contain a coinbase transaction") + if not self.has_valid_merkle_root(): + raise BlockValidationError("Block merkle_root does not match body") + + coinbase = self.transactions[0] + if not coinbase.is_coinbase(): + raise BlockValidationError("First transaction must be a valid coinbase") + + for transaction in self.transactions[1:]: + if transaction.is_coinbase(): + raise BlockValidationError("Coinbase transaction must only appear once") + + total_fees = sum(transaction.fee for transaction in self.transactions[1:]) + expected_amount = block_reward + total_fees + if coinbase.amount != expected_amount: + raise BlockValidationError( + f"Invalid coinbase amount: expected {expected_amount}, got {coinbase.amount}" + ) + + def hash(self) -> bytes: + return self.header.hash() diff --git a/src/minichain/chain.py b/src/minichain/chain.py new file mode 100644 index 0000000..acfc4a4 --- /dev/null +++ b/src/minichain/chain.py @@ -0,0 +1,224 @@ +"""Chain manager and fork-resolution logic.""" + +from __future__ import annotations + +from dataclasses import dataclass + +from minichain.block import Block, BlockHeader +from 
minichain.consensus import compute_next_difficulty_target, is_valid_pow +from minichain.genesis import GENESIS_PREVIOUS_HASH +from minichain.state import State, StateTransitionError + + +class ChainValidationError(ValueError): + """Raised when a block or branch is invalid.""" + + +@dataclass(frozen=True) +class ChainConfig: + """Configuration for chain validation and state transitions.""" + + block_reward: int = 50 + difficulty_adjustment_interval: int = 10 + target_block_time_seconds: int = 30 + + def validate(self) -> None: + if self.block_reward < 0: + raise ValueError("block_reward must be non-negative") + if self.difficulty_adjustment_interval <= 0: + raise ValueError("difficulty_adjustment_interval must be positive") + if self.target_block_time_seconds <= 0: + raise ValueError("target_block_time_seconds must be positive") + + +class ChainManager: + """Maintains canonical chain, block index, and current canonical state.""" + + def __init__( + self, + *, + genesis_block: Block, + genesis_state: State, + config: ChainConfig | None = None, + ) -> None: + self.config = config or ChainConfig() + self.config.validate() + self._validate_genesis(genesis_block) + + genesis_hash = genesis_block.hash().hex() + self._genesis_hash = genesis_hash + self._blocks_by_hash: dict[str, Block] = {genesis_hash: genesis_block} + self._heights: dict[str, int] = {genesis_hash: 0} + self._canonical_hashes: list[str] = [genesis_hash] + self._tip_hash = genesis_hash + + self._genesis_state = genesis_state.copy() + self.state = genesis_state.copy() + + @property + def tip_hash(self) -> str: + return self._tip_hash + + @property + def height(self) -> int: + return self._heights[self._tip_hash] + + @property + def tip_block(self) -> Block: + return self._blocks_by_hash[self._tip_hash] + + def contains_block(self, block_hash: str) -> bool: + return block_hash in self._blocks_by_hash + + def canonical_chain(self) -> list[Block]: + return [self._blocks_by_hash[block_hash] for block_hash in 
self._canonical_hashes] + + def get_block_by_hash(self, block_hash: str) -> Block | None: + return self._blocks_by_hash.get(block_hash) + + def get_canonical_block_by_height(self, height: int) -> Block | None: + if height < 0 or height >= len(self._canonical_hashes): + return None + return self._blocks_by_hash[self._canonical_hashes[height]] + + def expected_next_difficulty(self, *, parent_hash: str | None = None) -> int: + """Compute the expected next block target after the given parent.""" + path_hashes = ( + self._canonical_hashes + if parent_hash is None + else self._path_from_genesis(parent_hash) + ) + headers = [self._blocks_by_hash[block_hash].header for block_hash in path_hashes] + return compute_next_difficulty_target( + headers, + adjustment_interval=self.config.difficulty_adjustment_interval, + target_block_time_seconds=self.config.target_block_time_seconds, + ) + + def add_block(self, block: Block) -> str: + """Add a block to chain storage and update canonical tip when appropriate.""" + block_hash = block.hash().hex() + if block_hash in self._blocks_by_hash: + return "duplicate" + + parent_hash = block.header.previous_hash + if parent_hash not in self._blocks_by_hash: + raise ChainValidationError(f"Unknown parent block: {parent_hash}") + + self._blocks_by_hash[block_hash] = block + self._heights[block_hash] = block.header.block_height + + try: + candidate_path, candidate_state = self._replay_state_for_tip(block_hash) + except ChainValidationError: + self._blocks_by_hash.pop(block_hash, None) + self._heights.pop(block_hash, None) + raise + + parent_is_tip = parent_hash == self._tip_hash + candidate_height = len(candidate_path) - 1 + canonical_height = self.height + + if parent_is_tip and candidate_height == canonical_height + 1: + self._canonical_hashes.append(block_hash) + self._tip_hash = block_hash + self.state = candidate_state + return "extended" + + if candidate_height > canonical_height: + self._canonical_hashes = candidate_path + self._tip_hash = 
block_hash + self.state = candidate_state + return "reorg" + + return "stored_fork" + + def _replay_state_for_tip(self, tip_hash: str) -> tuple[list[str], State]: + path_hashes = self._path_from_genesis(tip_hash) + replay_state = self._genesis_state.copy() + replayed_headers = [self._blocks_by_hash[path_hashes[0]].header] + + for index, block_hash in enumerate(path_hashes[1:], start=1): + block = self._blocks_by_hash[block_hash] + parent_hash = path_hashes[index - 1] + parent_header = replayed_headers[-1] + + self._validate_link( + parent_hash=parent_hash, + parent_height=parent_header.block_height, + block=block, + ) + self._validate_consensus(block=block, parent_headers=replayed_headers) + + try: + replay_state.apply_block(block, block_reward=self.config.block_reward) + except StateTransitionError as exc: + raise ChainValidationError(f"State transition failed: {exc}") from exc + + replayed_headers.append(block.header) + + return path_hashes, replay_state + + def _path_from_genesis(self, tip_hash: str) -> list[str]: + if tip_hash not in self._blocks_by_hash: + raise ChainValidationError(f"Unknown block hash: {tip_hash}") + + path: list[str] = [] + seen: set[str] = set() + cursor = tip_hash + while True: + if cursor in seen: + raise ChainValidationError("Cycle detected in block ancestry") + seen.add(cursor) + path.append(cursor) + + if cursor == self._genesis_hash: + break + + parent_hash = self._blocks_by_hash[cursor].header.previous_hash + if parent_hash not in self._blocks_by_hash: + raise ChainValidationError( + f"Missing ancestor for block {cursor}: {parent_hash}" + ) + cursor = parent_hash + + path.reverse() + if path[0] != self._genesis_hash: + raise ChainValidationError("Candidate chain does not start at genesis") + return path + + def _validate_consensus(self, *, block: Block, parent_headers: list[BlockHeader]) -> None: + if not block.has_valid_merkle_root(): + raise ChainValidationError("Block merkle_root does not match transaction body") + + 
expected_target = compute_next_difficulty_target( + parent_headers, + adjustment_interval=self.config.difficulty_adjustment_interval, + target_block_time_seconds=self.config.target_block_time_seconds, + ) + if block.header.difficulty_target != expected_target: + raise ChainValidationError( + "Invalid difficulty target: " + f"expected {expected_target}, got {block.header.difficulty_target}" + ) + if not is_valid_pow(block.header): + raise ChainValidationError("Block does not satisfy Proof-of-Work target") + + @staticmethod + def _validate_link(*, parent_hash: str, parent_height: int, block: Block) -> None: + if block.header.previous_hash != parent_hash: + raise ChainValidationError("Block previous_hash does not match parent hash") + expected_height = parent_height + 1 + if block.header.block_height != expected_height: + raise ChainValidationError( + f"Invalid block height: expected {expected_height}, got {block.header.block_height}" + ) + + @staticmethod + def _validate_genesis(genesis_block: Block) -> None: + if genesis_block.header.block_height != 0: + raise ValueError("Genesis block height must be 0") + if genesis_block.header.previous_hash != GENESIS_PREVIOUS_HASH: + raise ValueError("Genesis previous_hash must be all zeros") + if genesis_block.transactions: + raise ValueError("Genesis block must not include transactions") diff --git a/src/minichain/consensus.py b/src/minichain/consensus.py new file mode 100644 index 0000000..7339569 --- /dev/null +++ b/src/minichain/consensus.py @@ -0,0 +1,101 @@ +"""Consensus and Proof-of-Work mining primitives.""" + +from __future__ import annotations + +from dataclasses import replace +from threading import Event +from typing import Sequence + +from minichain.block import BlockHeader + +MAX_TARGET = (1 << 256) - 1 + + +class MiningInterrupted(Exception): + """Raised when mining is cancelled via a stop signal.""" + + +def hash_to_int(block_hash: bytes) -> int: + """Convert a hash digest into a big-endian integer.""" + return 
int.from_bytes(block_hash, byteorder="big", signed=False) + + +def validate_difficulty_target(target: int) -> None: + """Validate difficulty target bounds.""" + if target <= 0: + raise ValueError("difficulty_target must be positive") + if target > MAX_TARGET: + raise ValueError("difficulty_target exceeds hash space") + + +def is_valid_pow(header: BlockHeader) -> bool: + """Return whether a header satisfies its own difficulty target.""" + if header.difficulty_target <= 0 or header.difficulty_target > MAX_TARGET: + return False + return hash_to_int(header.hash()) <= header.difficulty_target + + +def compute_next_difficulty_target( + chain: Sequence[BlockHeader], + *, + adjustment_interval: int = 10, + target_block_time_seconds: int = 30, +) -> int: + """Compute the next difficulty target using bounded proportional retargeting.""" + if adjustment_interval <= 0: + raise ValueError("adjustment_interval must be positive") + if target_block_time_seconds <= 0: + raise ValueError("target_block_time_seconds must be positive") + if not chain: + raise ValueError("chain must contain at least one header") + + tip = chain[-1] + validate_difficulty_target(tip.difficulty_target) + + if tip.block_height == 0: + return tip.difficulty_target + if tip.block_height % adjustment_interval != 0: + return tip.difficulty_target + if len(chain) <= adjustment_interval: + return tip.difficulty_target + + start_header = chain[-(adjustment_interval + 1)] + elapsed_seconds = tip.timestamp - start_header.timestamp + if elapsed_seconds <= 0: + elapsed_seconds = 1 + + expected_seconds = adjustment_interval * target_block_time_seconds + unbounded_target = (tip.difficulty_target * elapsed_seconds) // expected_seconds + + min_target = max(1, tip.difficulty_target // 2) + max_target = min(MAX_TARGET, tip.difficulty_target * 2) + bounded_target = min(max(unbounded_target, min_target), max_target) + + validate_difficulty_target(bounded_target) + return bounded_target + + +def mine_block_header( + 
header_template: BlockHeader, + *, + start_nonce: int = 0, + max_nonce: int = (1 << 64) - 1, + stop_event: Event | None = None, +) -> tuple[int, bytes]: + """Search nonces until a header hash satisfies the difficulty target.""" + validate_difficulty_target(header_template.difficulty_target) + if start_nonce < 0: + raise ValueError("start_nonce must be non-negative") + if max_nonce < start_nonce: + raise ValueError("max_nonce must be greater than or equal to start_nonce") + + for nonce in range(start_nonce, max_nonce + 1): + if stop_event is not None and stop_event.is_set(): + raise MiningInterrupted("Mining interrupted by stop event") + + candidate = replace(header_template, nonce=nonce) + digest = candidate.hash() + if hash_to_int(digest) <= candidate.difficulty_target: + return nonce, digest + + raise RuntimeError("No valid nonce found within nonce range") diff --git a/src/minichain/crypto.py b/src/minichain/crypto.py new file mode 100644 index 0000000..ede02d6 --- /dev/null +++ b/src/minichain/crypto.py @@ -0,0 +1,84 @@ +"""Cryptographic identity and signature helpers.""" + +from __future__ import annotations + +from typing import Any + +try: + from nacl.encoding import HexEncoder, RawEncoder + from nacl.exceptions import BadSignatureError + from nacl.hash import blake2b + from nacl.signing import SigningKey, VerifyKey +except ModuleNotFoundError as exc: # pragma: no cover - exercised in dependency-light envs + _NACL_IMPORT_ERROR = exc + HexEncoder = RawEncoder = None # type: ignore[assignment] + BadSignatureError = Exception # type: ignore[assignment] + SigningKey = VerifyKey = Any # type: ignore[assignment] + +ADDRESS_LENGTH_BYTES = 20 + + +def _require_nacl() -> None: + if "blake2b" not in globals(): + msg = "PyNaCl is required for minichain.crypto. 
Install with: pip install PyNaCl" + raise RuntimeError(msg) from _NACL_IMPORT_ERROR + + +def generate_key_pair() -> tuple[SigningKey, VerifyKey]: + """Generate a new Ed25519 keypair.""" + _require_nacl() + signing_key = SigningKey.generate() + return signing_key, signing_key.verify_key + + +def derive_address(verify_key: VerifyKey) -> str: + """Derive a 20-byte address from a verify key as lowercase hex.""" + _require_nacl() + digest = blake2b_digest(verify_key.encode()) + return digest[:ADDRESS_LENGTH_BYTES].hex() + + +def blake2b_digest(data: bytes) -> bytes: + """Compute a 32-byte BLAKE2b digest.""" + _require_nacl() + return blake2b(data, encoder=RawEncoder) + + +def serialize_signing_key(signing_key: SigningKey) -> str: + """Serialize a signing key into a hex string.""" + _require_nacl() + return signing_key.encode(encoder=HexEncoder).decode("ascii") + + +def deserialize_signing_key(signing_key_hex: str) -> SigningKey: + """Deserialize a signing key from a hex string.""" + _require_nacl() + return SigningKey(signing_key_hex, encoder=HexEncoder) + + +def serialize_verify_key(verify_key: VerifyKey) -> str: + """Serialize a verify key into a hex string.""" + _require_nacl() + return verify_key.encode(encoder=HexEncoder).decode("ascii") + + +def deserialize_verify_key(verify_key_hex: str) -> VerifyKey: + """Deserialize a verify key from a hex string.""" + _require_nacl() + return VerifyKey(verify_key_hex, encoder=HexEncoder) + + +def sign_message(message: bytes, signing_key: SigningKey) -> bytes: + """Sign bytes and return the detached signature bytes.""" + _require_nacl() + return signing_key.sign(message).signature + + +def verify_signature(message: bytes, signature: bytes, verify_key: VerifyKey) -> bool: + """Verify a detached Ed25519 signature.""" + _require_nacl() + try: + verify_key.verify(message, signature) + except BadSignatureError: + return False + return True diff --git a/src/minichain/genesis.py b/src/minichain/genesis.py new file mode 100644 index 
0000000..e8e253d --- /dev/null +++ b/src/minichain/genesis.py @@ -0,0 +1,81 @@ +"""Genesis block/state creation and application.""" + +from __future__ import annotations + +from dataclasses import dataclass, field + +from minichain.block import Block, BlockHeader +from minichain.crypto import blake2b_digest +from minichain.state import Account, State + +GENESIS_PREVIOUS_HASH = "00" * 32 + + +def _is_lower_hex(value: str, expected_length: int) -> bool: + if len(value) != expected_length: + return False + return all(ch in "0123456789abcdef" for ch in value) + + +@dataclass(frozen=True) +class GenesisConfig: + """Configurable parameters for building genesis artifacts.""" + + initial_balances: dict[str, int] = field(default_factory=dict) + timestamp: int = 1_739_000_000 + difficulty_target: int = (1 << 255) - 1 + version: int = 0 + + def validate(self) -> None: + if self.timestamp < 0: + raise ValueError("Genesis timestamp must be non-negative") + if self.difficulty_target <= 0: + raise ValueError("Genesis difficulty_target must be positive") + for address, balance in self.initial_balances.items(): + if not _is_lower_hex(address, 40): + raise ValueError(f"Invalid genesis address: {address}") + if balance < 0: + raise ValueError(f"Negative genesis balance for {address}") + + +def create_genesis_block(config: GenesisConfig) -> Block: + """Build the genesis block (height 0, no PoW check required).""" + config.validate() + header = BlockHeader( + version=config.version, + previous_hash=GENESIS_PREVIOUS_HASH, + merkle_root=blake2b_digest(b"").hex(), + timestamp=config.timestamp, + difficulty_target=config.difficulty_target, + nonce=0, + block_height=0, + ) + return Block(header=header, transactions=[]) + + +def apply_genesis_block(state: State, block: Block, config: GenesisConfig) -> None: + """Apply genesis allocations to an empty state.""" + config.validate() + if state.accounts: + raise ValueError("Genesis can only be applied to an empty state") + if 
block.header.block_height != 0: + raise ValueError("Genesis block height must be 0") + if block.header.previous_hash != GENESIS_PREVIOUS_HASH: + raise ValueError("Genesis previous_hash must be all zeros") + if block.transactions: + raise ValueError("Genesis block must not contain transactions") + + expected_merkle_root = blake2b_digest(b"").hex() + if block.header.merkle_root != expected_merkle_root: + raise ValueError("Genesis merkle_root must commit to an empty tx list") + + for address, balance in config.initial_balances.items(): + state.set_account(address, Account(balance=balance, nonce=0)) + + +def create_genesis_state(config: GenesisConfig) -> tuple[Block, State]: + """Create genesis block and initialized state in one step.""" + block = create_genesis_block(config) + state = State() + apply_genesis_block(state, block, config) + return block, state diff --git a/src/minichain/mempool.py b/src/minichain/mempool.py new file mode 100644 index 0000000..a9e1db1 --- /dev/null +++ b/src/minichain/mempool.py @@ -0,0 +1,247 @@ +"""Mempool data structures and transaction selection logic.""" + +from __future__ import annotations + +import time +from dataclasses import dataclass, field +from heapq import heappop, heappush +from typing import Iterable + +from minichain.state import Account, State +from minichain.transaction import Transaction + + +class MempoolValidationError(ValueError): + """Raised when a transaction cannot be accepted into the mempool.""" + + +@dataclass +class _PoolEntry: + transaction: Transaction + transaction_id: str + received_at: int + + @property + def fee(self) -> int: + return self.transaction.fee + + +@dataclass +class _SenderPool: + entries: dict[int, _PoolEntry] = field(default_factory=dict) + ready_nonces: set[int] = field(default_factory=set) + waiting_nonces: set[int] = field(default_factory=set) + + +class Mempool: + """Holds validated pending transactions and exposes mining selection.""" + + def __init__(self, *, max_size: int = 1_000, 
max_age_seconds: int = 3_600) -> None: + if max_size <= 0: + raise ValueError("max_size must be positive") + if max_age_seconds <= 0: + raise ValueError("max_age_seconds must be positive") + + self.max_size = max_size + self.max_age_seconds = max_age_seconds + self._entries_by_id: dict[str, _PoolEntry] = {} + self._sender_pools: dict[str, _SenderPool] = {} + self._id_by_sender_nonce: dict[tuple[str, int], str] = {} + + def size(self) -> int: + return len(self._entries_by_id) + + def ready_count(self) -> int: + return sum(len(pool.ready_nonces) for pool in self._sender_pools.values()) + + def waiting_count(self) -> int: + return sum(len(pool.waiting_nonces) for pool in self._sender_pools.values()) + + def contains(self, transaction_id: str) -> bool: + return transaction_id in self._entries_by_id + + def add_transaction( + self, + transaction: Transaction, + state: State, + *, + received_at: int | None = None, + ) -> str: + """Validate and enqueue a transaction, returning its transaction id.""" + if transaction.is_coinbase(): + raise MempoolValidationError("Coinbase transactions are not accepted") + if not transaction.verify(): + raise MempoolValidationError("Transaction failed signature/identity validation") + + transaction_id = transaction.transaction_id().hex() + if transaction_id in self._entries_by_id: + raise MempoolValidationError("Duplicate transaction") + + sender = transaction.sender + nonce_key = (sender, transaction.nonce) + if nonce_key in self._id_by_sender_nonce: + raise MempoolValidationError("Duplicate sender nonce in mempool") + + sender_account = state.accounts.get(sender, Account()) + if transaction.nonce < sender_account.nonce: + raise MempoolValidationError("Transaction nonce is stale") + + if transaction.nonce == sender_account.nonce: + immediate_cost = transaction.amount + transaction.fee + if immediate_cost > sender_account.balance: + raise MempoolValidationError("Insufficient balance for pending transaction") + + entry = _PoolEntry( + 
transaction=transaction, + transaction_id=transaction_id, + received_at=int(time.time()) if received_at is None else received_at, + ) + + pool = self._sender_pools.setdefault(sender, _SenderPool()) + pool.entries[transaction.nonce] = entry + self._entries_by_id[transaction_id] = entry + self._id_by_sender_nonce[nonce_key] = transaction_id + self._recompute_sender_pool(sender, state) + self.evict(state, current_time=entry.received_at) + return transaction_id + + def get_transactions_for_mining( + self, state: State, *, limit: int, current_time: int | None = None + ) -> list[Transaction]: + """Return up to `limit` ready transactions, prioritized by fee.""" + if limit <= 0: + return [] + + now = int(time.time()) if current_time is None else current_time + self.evict(state, current_time=now) + + sender_ready: dict[str, list[_PoolEntry]] = {} + for sender, pool in self._sender_pools.items(): + self._recompute_sender_pool(sender, state) + ready_entries = sorted( + (pool.entries[nonce] for nonce in pool.ready_nonces), + key=lambda entry: entry.transaction.nonce, + ) + if ready_entries: + sender_ready[sender] = ready_entries + + heap: list[tuple[int, int, str, int]] = [] + for sender, entries in sender_ready.items(): + first = entries[0] + heappush(heap, (-first.fee, first.transaction.nonce, sender, 0)) + + selected: list[Transaction] = [] + while heap and len(selected) < limit: + _neg_fee, _nonce, sender, index = heappop(heap) + entry = sender_ready[sender][index] + selected.append(entry.transaction) + + next_index = index + 1 + if next_index < len(sender_ready[sender]): + nxt = sender_ready[sender][next_index] + heappush(heap, (-nxt.fee, nxt.transaction.nonce, sender, next_index)) + + return selected + + def remove_confirmed_transactions( + self, + transactions: Iterable[Transaction], + state: State, + ) -> None: + """Remove transactions confirmed in a block and revalidate sender queues.""" + touched_senders: set[str] = set() + for transaction in transactions: + 
transaction_id = transaction.transaction_id().hex() + entry = self._entries_by_id.get(transaction_id) + if entry is None: + continue + touched_senders.add(entry.transaction.sender) + self._remove_entry(entry) + + for sender in touched_senders: + self._recompute_sender_pool(sender, state) + + for sender in list(self._sender_pools): + self._recompute_sender_pool(sender, state) + + def evict(self, state: State, *, current_time: int | None = None) -> list[str]: + """Evict stale transactions and, if oversized, low-fee transactions.""" + now = int(time.time()) if current_time is None else current_time + evicted_ids: list[str] = [] + + stale_ids = [ + tx_id + for tx_id, entry in self._entries_by_id.items() + if now - entry.received_at > self.max_age_seconds + ] + for tx_id in stale_ids: + entry = self._entries_by_id.get(tx_id) + if entry is None: + continue + evicted_ids.append(tx_id) + self._remove_entry(entry) + + while len(self._entries_by_id) > self.max_size: + entry = min( + self._entries_by_id.values(), + key=lambda item: (item.fee, item.received_at), + ) + evicted_ids.append(entry.transaction_id) + self._remove_entry(entry) + + for sender in list(self._sender_pools): + self._recompute_sender_pool(sender, state) + + return evicted_ids + + def _recompute_sender_pool(self, sender: str, state: State) -> None: + pool = self._sender_pools.get(sender) + if pool is None: + return + + account = state.accounts.get(sender, Account()) + state_nonce = account.nonce + available_balance = account.balance + + for nonce in [nonce for nonce in pool.entries if nonce < state_nonce]: + self._remove_entry(pool.entries[nonce]) + + pool = self._sender_pools.get(sender) + if pool is None: + return + + ready_nonces: set[int] = set() + expected_nonce = state_nonce + while expected_nonce in pool.entries: + candidate = pool.entries[expected_nonce].transaction + candidate_cost = candidate.amount + candidate.fee + if candidate_cost > available_balance: + break + ready_nonces.add(expected_nonce) 
+ available_balance -= candidate_cost + expected_nonce += 1 + + all_nonces = set(pool.entries.keys()) + pool.ready_nonces = ready_nonces + pool.waiting_nonces = all_nonces - ready_nonces + + if not pool.entries: + self._sender_pools.pop(sender, None) + + def _remove_entry(self, entry: _PoolEntry) -> None: + transaction = entry.transaction + sender = transaction.sender + nonce = transaction.nonce + + self._entries_by_id.pop(entry.transaction_id, None) + self._id_by_sender_nonce.pop((sender, nonce), None) + + pool = self._sender_pools.get(sender) + if pool is None: + return + + pool.entries.pop(nonce, None) + pool.ready_nonces.discard(nonce) + pool.waiting_nonces.discard(nonce) + if not pool.entries: + self._sender_pools.pop(sender, None) diff --git a/src/minichain/merkle.py b/src/minichain/merkle.py new file mode 100644 index 0000000..d043f81 --- /dev/null +++ b/src/minichain/merkle.py @@ -0,0 +1,27 @@ +"""Merkle tree construction for transaction commitments.""" + +from __future__ import annotations + +from minichain.crypto import blake2b_digest + + +def _hash_pair(left: bytes, right: bytes) -> bytes: + return blake2b_digest(left + right) + + +def compute_merkle_root(leaves: list[bytes]) -> bytes: + """Compute the Merkle root from pre-hashed leaf bytes.""" + if not leaves: + return blake2b_digest(b"") + + level = [bytes(leaf) for leaf in leaves] + while len(level) > 1: + if len(level) % 2 == 1: + level.append(level[-1]) + + next_level: list[bytes] = [] + for i in range(0, len(level), 2): + next_level.append(_hash_pair(level[i], level[i + 1])) + level = next_level + + return level[0] diff --git a/src/minichain/mining.py b/src/minichain/mining.py new file mode 100644 index 0000000..140e391 --- /dev/null +++ b/src/minichain/mining.py @@ -0,0 +1,90 @@ +"""Block construction utilities for miners.""" + +from __future__ import annotations + +import time +from dataclasses import replace +from threading import Event + +from minichain.block import Block, BlockHeader +from 
minichain.chain import ChainManager +from minichain.consensus import mine_block_header +from minichain.mempool import Mempool +from minichain.transaction import ADDRESS_HEX_LENGTH, create_coinbase_transaction + + +class BlockConstructionError(ValueError): + """Raised when a candidate block cannot be constructed.""" + + +def build_candidate_block( + *, + chain_manager: ChainManager, + mempool: Mempool, + miner_address: str, + max_transactions: int, + timestamp: int | None = None, +) -> Block: + """Build a candidate block template from chain tip and mempool.""" + if max_transactions < 0: + raise BlockConstructionError("max_transactions must be non-negative") + if not _is_lower_hex(miner_address, ADDRESS_HEX_LENGTH): + raise BlockConstructionError("miner_address must be a 20-byte lowercase hex string") + + parent = chain_manager.tip_block + parent_hash = chain_manager.tip_hash + base_timestamp = int(time.time()) if timestamp is None else timestamp + if base_timestamp < 0: + raise BlockConstructionError("timestamp must be non-negative") + block_timestamp = max(base_timestamp, parent.header.timestamp + 1) + + selected_transactions = mempool.get_transactions_for_mining( + chain_manager.state, + limit=max_transactions, + current_time=block_timestamp, + ) + total_fees = sum(transaction.fee for transaction in selected_transactions) + coinbase_amount = chain_manager.config.block_reward + total_fees + coinbase = create_coinbase_transaction( + miner_address=miner_address, + amount=coinbase_amount, + timestamp=block_timestamp, + ) + + header = BlockHeader( + version=parent.header.version, + previous_hash=parent_hash, + merkle_root="", + timestamp=block_timestamp, + difficulty_target=chain_manager.expected_next_difficulty(parent_hash=parent_hash), + nonce=0, + block_height=parent.header.block_height + 1, + ) + candidate = Block(header=header, transactions=[coinbase, *selected_transactions]) + candidate.update_header_merkle_root() + return candidate + + +def mine_candidate_block( 
+ *, + block_template: Block, + start_nonce: int = 0, + max_nonce: int = (1 << 64) - 1, + stop_event: Event | None = None, +) -> tuple[Block, bytes]: + """Search for a valid nonce and return a mined copy of the block.""" + nonce, digest = mine_block_header( + block_template.header, + start_nonce=start_nonce, + max_nonce=max_nonce, + stop_event=stop_event, + ) + mined_header = replace(block_template.header, nonce=nonce) + mined_block = Block(header=mined_header, transactions=list(block_template.transactions)) + return mined_block, digest + + +def _is_lower_hex(value: str, expected_length: int) -> bool: + if len(value) != expected_length: + return False + return all(ch in "0123456789abcdef" for ch in value) diff --git a/src/minichain/network.py b/src/minichain/network.py new file mode 100644 index 0000000..967bd79 --- /dev/null +++ b/src/minichain/network.py @@ -0,0 +1,1075 @@ +"""Peer-to-peer networking and peer discovery for MiniChain.""" + +from __future__ import annotations + +import asyncio +import contextlib +import json +import secrets +import socket +import struct +import time +from collections import deque +from dataclasses import asdict, dataclass, field +from typing import Any, Callable, Coroutine + +from minichain.block import Block, BlockHeader +from minichain.transaction import Transaction + +_LOCAL_DISCOVERY_REGISTRY: set["MiniChainNetwork"] = set() +TX_GOSSIP_PROTOCOL_ID = "/minichain/tx/1.0.0" +BLOCK_GOSSIP_PROTOCOL_ID = "/minichain/block/1.0.0" +SYNC_PROTOCOL_ID = "/minichain/sync/1.0.0" + + +class NetworkError(ValueError): + """Raised when networking configuration or message handling is invalid.""" + + +@dataclass(frozen=True) +class PeerAddress: + """Network address for a peer node.""" + + host: str + port: int + + def validate(self) -> None: + if not self.host: + raise NetworkError("peer host must be non-empty") + if not (0 <= self.port <= 65535): + raise NetworkError("peer port must be between 0 and 65535") + + @classmethod + def 
from_string(cls, value: str) -> PeerAddress: + if ":" not in value: + raise NetworkError("peer must be formatted as host:port") + host, port_text = value.rsplit(":", 1) + if not port_text.isdigit(): + raise NetworkError("peer port must be numeric") + peer = cls(host=host, port=int(port_text)) + peer.validate() + return peer + + +@dataclass(frozen=True) +class PeerInfo: + """Metadata tracked for a discovered peer.""" + + node_id: str + address: PeerAddress + discovered_via: str + last_seen: int + + +@dataclass(frozen=True) +class NetworkConfig: + """Runtime configuration for the MiniChain networking service.""" + + host: str = "127.0.0.1" + port: int = 0 + advertise_host: str | None = None + node_id: str | None = None + bootstrap_peers: tuple[PeerAddress, ...] = field(default_factory=tuple) + connect_timeout_seconds: float = 2.0 + enable_mdns: bool = True + mdns_group: str = "224.1.1.199" + mdns_port: int = 10099 + mdns_interval_seconds: float = 0.5 + reconnect_interval_seconds: float = 1.0 + seen_tx_cache_size: int = 20_000 + seen_block_cache_size: int = 5_000 + sync_batch_size: int = 128 + + def validate(self) -> None: + if not self.host: + raise NetworkError("host must be non-empty") + if self.advertise_host is not None and not self.advertise_host: + raise NetworkError("advertise_host must be non-empty when provided") + if not (0 <= self.port <= 65535): + raise NetworkError("port must be between 0 and 65535") + if self.connect_timeout_seconds <= 0: + raise NetworkError("connect_timeout_seconds must be positive") + if not (0 <= self.mdns_port <= 65535): + raise NetworkError("mdns_port must be between 0 and 65535") + if self.mdns_interval_seconds <= 0: + raise NetworkError("mdns_interval_seconds must be positive") + if self.reconnect_interval_seconds <= 0: + raise NetworkError("reconnect_interval_seconds must be positive") + if self.seen_tx_cache_size <= 0: + raise NetworkError("seen_tx_cache_size must be positive") + if self.seen_block_cache_size <= 0: + raise 
NetworkError("seen_block_cache_size must be positive") + if self.sync_batch_size <= 0: + raise NetworkError("sync_batch_size must be positive") + for peer in self.bootstrap_peers: + peer.validate() + + +@dataclass +class _PeerConnection: + peer: PeerInfo + reader: asyncio.StreamReader + writer: asyncio.StreamWriter + task: asyncio.Task[None] | None = None + + +class _DiscoveryProtocol(asyncio.DatagramProtocol): + def __init__(self, callback: Callable[[bytes, tuple[str, int]], None]) -> None: + self._callback = callback + + def datagram_received(self, data: bytes, addr: tuple[str, int]) -> None: + self._callback(data, addr) + + +class MiniChainNetwork: + """Async TCP-based peer networking with bootstrap and multicast discovery.""" + + def __init__(self, config: NetworkConfig) -> None: + self.config = config + self.config.validate() + + self._node_id = self.config.node_id or secrets.token_hex(16) + self._server: asyncio.AbstractServer | None = None + self._connections: dict[str, _PeerConnection] = {} + self._known_peers: dict[str, PeerInfo] = {} + self._connecting_addresses: set[tuple[str, int]] = set() + self._background_tasks: set[asyncio.Task[None]] = set() + self._running = False + self._listen_port = self.config.port + + self._mdns_transport: asyncio.DatagramTransport | None = None + self._mdns_protocol: _DiscoveryProtocol | None = None + self._mdns_announce_task: asyncio.Task[None] | None = None + self._use_local_discovery = False + self._seen_transactions: set[str] = set() + self._seen_transaction_order: deque[str] = deque() + self._seen_blocks: set[str] = set() + self._seen_block_order: deque[str] = deque() + self._transaction_handler: Callable[[Transaction], bool] | None = None + self._block_handler: Callable[[Block], bool] | None = None + self._sync_height_getter: Callable[[], int] | None = None + self._sync_block_getter: Callable[[int], Block | None] | None = None + self._sync_block_applier: Callable[[Block], bool] | None = None + 
self._peer_advertised_heights: dict[str, int] = {} + self._sync_inflight: set[str] = set() + + @property + def node_id(self) -> str: + return self._node_id + + @property + def running(self) -> bool: + return self._running + + @property + def listen_host(self) -> str: + return self.config.host + + @property + def listen_port(self) -> int: + return self._listen_port + + @property + def advertise_host(self) -> str: + if self.config.advertise_host is not None: + return self.config.advertise_host + return self.listen_host + + def listen_address(self) -> PeerAddress: + return PeerAddress(host=self.listen_host, port=self.listen_port) + + def known_peers(self) -> list[PeerInfo]: + return sorted(self._known_peers.values(), key=lambda peer: peer.node_id) + + def connected_peer_ids(self) -> set[str]: + return set(self._connections) + + def is_connected_to(self, peer_id: str) -> bool: + return peer_id in self._connections + + def set_transaction_handler(self, handler: Callable[[Transaction], bool] | None) -> None: + """Register a local transaction validation/ingestion callback.""" + self._transaction_handler = handler + + def set_block_handler(self, handler: Callable[[Block], bool] | None) -> None: + """Register a local block validation/ingestion callback.""" + self._block_handler = handler + + def set_sync_handlers( + self, + *, + get_height: Callable[[], int] | None, + get_block_by_height: Callable[[int], Block | None] | None, + apply_block: Callable[[Block], bool] | None, + ) -> None: + """Register callbacks used by `/minichain/sync/1.0.0`.""" + self._sync_height_getter = get_height + self._sync_block_getter = get_block_by_height + self._sync_block_applier = apply_block + + async def wait_for_connected_peers(self, expected_count: int, *, timeout: float = 5.0) -> None: + if expected_count < 0: + raise NetworkError("expected_count must be non-negative") + if timeout <= 0: + raise NetworkError("timeout must be positive") + + deadline = asyncio.get_running_loop().time() + 
timeout + while asyncio.get_running_loop().time() < deadline: + if len(self._connections) >= expected_count: + return + await asyncio.sleep(0.05) + raise TimeoutError( + f"Timed out waiting for {expected_count} peers; got {len(self._connections)}" + ) + + async def wait_for_height(self, expected_height: int, *, timeout: float = 5.0) -> None: + """Wait until local sync height reaches at least `expected_height`.""" + if expected_height < 0: + raise NetworkError("expected_height must be non-negative") + if timeout <= 0: + raise NetworkError("timeout must be positive") + + deadline = asyncio.get_running_loop().time() + timeout + while asyncio.get_running_loop().time() < deadline: + if self._local_chain_height() >= expected_height: + return + await asyncio.sleep(0.05) + raise TimeoutError( + f"Timed out waiting for height {expected_height}; got {self._local_chain_height()}" + ) + + async def submit_transaction(self, transaction: Transaction) -> bool: + """Validate and gossip a locally submitted transaction.""" + if not transaction.verify(): + raise NetworkError("cannot gossip invalid transaction") + tx_id = transaction.transaction_id().hex() + if not self._remember_seen_transaction(tx_id): + return False + if not self._accept_transaction(transaction): + return False + + message = self._transaction_payload(transaction, tx_id=tx_id) + await self._broadcast_message(message, exclude_peer_ids=set()) + return True + + async def submit_block(self, block: Block, *, already_applied: bool = False) -> bool: + """Validate and gossip a locally mined or received canonical block.""" + if not block.has_valid_merkle_root(): + raise NetworkError("cannot gossip block with invalid merkle root") + + block_hash = block.hash().hex() + if not self._remember_seen_block(block_hash): + return False + if not already_applied and not self._accept_block(block): + return False + + message = self._block_payload(block, block_hash=block_hash) + await self._broadcast_message(message, 
exclude_peer_ids=set()) + return True + + async def start(self) -> None: + """Start the TCP server, discovery tasks, and bootstrap connections.""" + if self._running: + return + + self._server = await asyncio.start_server( + self._handle_incoming_connection, + host=self.config.host, + port=self.config.port, + ) + sockets = self._server.sockets or [] + if not sockets: + raise NetworkError("failed to bind network server socket") + self._listen_port = int(sockets[0].getsockname()[1]) + self._running = True + + if self.config.enable_mdns: + await self._start_mdns_discovery() + + for peer in self.config.bootstrap_peers: + self._spawn(self.connect_to_peer(peer, discovered_via="bootstrap")) + self._spawn(self._reconnect_loop()) + + async def stop(self) -> None: + """Stop server, discovery, and all active peer connections.""" + if not self._running: + return + self._running = False + + if self._mdns_announce_task is not None: + self._mdns_announce_task.cancel() + with contextlib.suppress(asyncio.CancelledError): + await self._mdns_announce_task + self._mdns_announce_task = None + + if self._mdns_transport is not None: + self._mdns_transport.close() + self._mdns_transport = None + self._mdns_protocol = None + if self._use_local_discovery: + _LOCAL_DISCOVERY_REGISTRY.discard(self) + self._use_local_discovery = False + + if self._server is not None: + self._server.close() + await self._server.wait_closed() + self._server = None + + for peer_id in list(self._connections): + self._close_connection(peer_id) + self._peer_advertised_heights.clear() + self._sync_inflight.clear() + + if self._background_tasks: + for task in list(self._background_tasks): + task.cancel() + await asyncio.gather(*self._background_tasks, return_exceptions=True) + self._background_tasks.clear() + + async def connect_to_peer(self, peer: PeerAddress, *, discovered_via: str) -> bool: + """Open a TCP connection to a peer and perform handshake.""" + peer.validate() + if self._is_self_peer(peer): + return False 
+ + address_key = (peer.host, peer.port) + if address_key in self._connecting_addresses: + return False + if any(connection.peer.address == peer for connection in self._connections.values()): + return False + + self._connecting_addresses.add(address_key) + try: + try: + connection = asyncio.open_connection(peer.host, peer.port) + reader, writer = await asyncio.wait_for( + connection, + timeout=self.config.connect_timeout_seconds, + ) + except (TimeoutError, OSError): + return False + + try: + await self._write_message(writer, self._hello_payload()) + message = await self._read_message(reader) + peer_info = self._peer_from_hello( + message=message, + fallback_host=peer.host, + discovered_via=discovered_via, + ) + if peer_info.node_id == self.node_id: + writer.close() + await writer.wait_closed() + return False + + if not self._register_connection(peer_info, reader, writer): + writer.close() + await writer.wait_closed() + return False + + await self._write_message(writer, self._peer_list_payload()) + await self._send_sync_status(writer) + self._start_peer_reader(peer_info.node_id) + return True + except Exception: + writer.close() + with contextlib.suppress(Exception): + await writer.wait_closed() + raise + finally: + self._connecting_addresses.discard(address_key) + + async def _handle_incoming_connection( + self, + reader: asyncio.StreamReader, + writer: asyncio.StreamWriter, + ) -> None: + try: + message = await self._read_message(reader) + peername = writer.get_extra_info("peername") + fallback_host = "127.0.0.1" if not peername else str(peername[0]) + peer_info = self._peer_from_hello( + message=message, + fallback_host=fallback_host, + discovered_via="incoming", + ) + if peer_info.node_id == self.node_id: + writer.close() + await writer.wait_closed() + return + + await self._write_message(writer, self._hello_payload()) + if not self._register_connection(peer_info, reader, writer): + writer.close() + await writer.wait_closed() + return + + await 
self._write_message(writer, self._peer_list_payload()) + await self._send_sync_status(writer) + self._start_peer_reader(peer_info.node_id) + except Exception: + writer.close() + with contextlib.suppress(Exception): + await writer.wait_closed() + + def _register_connection( + self, + peer: PeerInfo, + reader: asyncio.StreamReader, + writer: asyncio.StreamWriter, + ) -> bool: + existing = self._connections.get(peer.node_id) + if existing is not None: + return False + + self._known_peers[peer.node_id] = peer + self._connections[peer.node_id] = _PeerConnection( + peer=peer, + reader=reader, + writer=writer, + ) + return True + + def _start_peer_reader(self, peer_id: str) -> None: + connection = self._connections.get(peer_id) + if connection is None: + return + task = asyncio.create_task(self._peer_reader_loop(peer_id)) + connection.task = task + self._background_tasks.add(task) + task.add_done_callback(self._background_tasks.discard) + + async def _peer_reader_loop(self, peer_id: str) -> None: + connection = self._connections.get(peer_id) + if connection is None: + return + try: + while self._running: + message = await self._read_message(connection.reader, eof_ok=True) + if message is None: + break + await self._handle_peer_message(peer_id, message) + except Exception: + pass + finally: + self._close_connection(peer_id) + + async def _handle_peer_message(self, peer_id: str, message: dict[str, object]) -> None: + message_type = message.get("type") + if message_type == "peers": + await self._handle_peer_addresses(peer_id, message) + return + if message_type == "tx_result": + return + if message_type == "tx_gossip": + await self._handle_transaction_gossip(peer_id, message) + return + if message_type == "block_gossip": + await self._handle_block_gossip(peer_id, message) + return + if message_type == "sync_status": + await self._handle_sync_status(peer_id, message) + return + if message_type == "sync_request": + await self._handle_sync_request(peer_id, message) + return + 
if message_type == "sync_blocks": + await self._handle_sync_blocks(peer_id, message) + return + + async def _handle_peer_addresses(self, peer_id: str, message: dict[str, object]) -> None: + peers = message.get("peers") + if not isinstance(peers, list): + raise NetworkError("peers message requires list payload") + + for candidate in peers: + if not isinstance(candidate, dict): + continue + host = candidate.get("host") + port = candidate.get("port") + if not isinstance(host, str) or not isinstance(port, int): + continue + peer = PeerAddress(host=host, port=port) + if self._is_self_peer(peer): + continue + self._spawn( + self.connect_to_peer( + peer, + discovered_via=f"peer:{peer_id}", + ) + ) + + async def _handle_transaction_gossip( + self, + source_peer_id: str, + message: dict[str, object], + ) -> None: + if message.get("protocol") != TX_GOSSIP_PROTOCOL_ID: + raise NetworkError("tx_gossip protocol id mismatch") + + payload = message.get("transaction") + if not isinstance(payload, dict): + raise NetworkError("tx_gossip transaction payload must be an object") + transaction = self._transaction_from_payload(payload) + + announced_id = message.get("transaction_id") + if announced_id is not None and not isinstance(announced_id, str): + raise NetworkError("transaction_id must be a string") + + tx_id = transaction.transaction_id().hex() + accepted = False + reason = "accepted" + if not transaction.verify(): + reason = "invalid_signature_or_identity" + elif announced_id is not None and announced_id != tx_id: + reason = "transaction_id_mismatch" + elif not self._remember_seen_transaction(tx_id): + reason = "duplicate" + elif not self._accept_transaction(transaction): + reason = "rejected_by_node" + else: + accepted = True + + await self._send_tx_result( + peer_id=source_peer_id, + transaction_id=tx_id, + accepted=accepted, + reason=reason, + ) + if not accepted: + return + + forward_payload = self._transaction_payload(transaction, tx_id=tx_id) + await 
self._broadcast_message( + forward_payload, + exclude_peer_ids={source_peer_id}, + ) + + async def _handle_block_gossip( + self, + source_peer_id: str, + message: dict[str, object], + ) -> None: + if message.get("protocol") != BLOCK_GOSSIP_PROTOCOL_ID: + raise NetworkError("block_gossip protocol id mismatch") + + payload = message.get("block") + if not isinstance(payload, dict): + raise NetworkError("block_gossip payload must be an object") + block = self._block_from_payload(payload) + if not block.has_valid_merkle_root(): + return + + announced_hash = message.get("block_hash") + if announced_hash is not None and not isinstance(announced_hash, str): + raise NetworkError("block_hash must be a string") + + block_hash = block.hash().hex() + if announced_hash is not None and announced_hash != block_hash: + return + if not self._remember_seen_block(block_hash): + return + if not self._accept_block(block): + return + + forward_payload = self._block_payload(block, block_hash=block_hash) + await self._broadcast_message( + forward_payload, + exclude_peer_ids={source_peer_id}, + ) + + async def _handle_sync_status(self, peer_id: str, message: dict[str, object]) -> None: + if message.get("protocol") != SYNC_PROTOCOL_ID: + raise NetworkError("sync_status protocol id mismatch") + peer_height = message.get("height") + if not isinstance(peer_height, int): + raise NetworkError("sync_status height must be an integer") + if peer_height < 0: + raise NetworkError("sync_status height must be non-negative") + + self._peer_advertised_heights[peer_id] = peer_height + if peer_height > self._local_chain_height(): + self._spawn(self._request_missing_blocks(peer_id)) + + async def _handle_sync_request(self, peer_id: str, message: dict[str, object]) -> None: + if message.get("protocol") != SYNC_PROTOCOL_ID: + raise NetworkError("sync_request protocol id mismatch") + if self._sync_block_getter is None: + return + + from_height = message.get("from_height") + to_height = message.get("to_height") 
+ if not isinstance(from_height, int) or not isinstance(to_height, int): + raise NetworkError("sync_request heights must be integers") + if from_height < 0 or to_height < from_height: + raise NetworkError("sync_request range is invalid") + + max_to_height = min(to_height, from_height + self.config.sync_batch_size - 1) + blocks: list[Block] = [] + for height in range(from_height, max_to_height + 1): + block = self._sync_block_getter(height) + if block is None: + break + blocks.append(block) + + connection = self._connections.get(peer_id) + if connection is None: + return + response = self._sync_blocks_payload(start_height=from_height, blocks=blocks) + await self._write_message(connection.writer, response) + + async def _handle_sync_blocks(self, peer_id: str, message: dict[str, object]) -> None: + if message.get("protocol") != SYNC_PROTOCOL_ID: + raise NetworkError("sync_blocks protocol id mismatch") + if self._sync_block_applier is None: + self._sync_inflight.discard(peer_id) + return + + start_height = message.get("start_height") + payloads = message.get("blocks") + if not isinstance(start_height, int): + raise NetworkError("sync_blocks start_height must be an integer") + if not isinstance(payloads, list): + raise NetworkError("sync_blocks blocks must be a list") + + for entry in payloads: + if not isinstance(entry, dict): + raise NetworkError("sync_blocks entry must be an object") + block = self._block_from_payload(entry) + if not block.has_valid_merkle_root(): + self._sync_inflight.discard(peer_id) + return + if not self._sync_block_applier(block): + self._sync_inflight.discard(peer_id) + return + self._remember_seen_block(block.hash().hex()) + + if not payloads: + self._sync_inflight.discard(peer_id) + return + + self._sync_inflight.discard(peer_id) + if self._peer_advertised_heights.get(peer_id, -1) > self._local_chain_height(): + self._spawn(self._request_missing_blocks(peer_id)) + + async def _request_missing_blocks(self, peer_id: str) -> None: + if peer_id 
in self._sync_inflight: + return + remote_height = self._peer_advertised_heights.get(peer_id) + if remote_height is None: + return + + local_height = self._local_chain_height() + if remote_height <= local_height: + return + + connection = self._connections.get(peer_id) + if connection is None: + return + + from_height = local_height + 1 + to_height = min(remote_height, from_height + self.config.sync_batch_size - 1) + request = self._sync_request_payload(from_height=from_height, to_height=to_height) + self._sync_inflight.add(peer_id) + try: + await self._write_message(connection.writer, request) + except Exception: + self._sync_inflight.discard(peer_id) + self._close_connection(peer_id) + + def _close_connection(self, peer_id: str) -> None: + connection = self._connections.pop(peer_id, None) + if connection is None: + return + + self._peer_advertised_heights.pop(peer_id, None) + self._sync_inflight.discard(peer_id) + + if connection.task is not None and not connection.task.done(): + connection.task.cancel() + connection.writer.close() + + async def _broadcast_message( + self, + payload: dict[str, object], + *, + exclude_peer_ids: set[str], + ) -> None: + failed_peer_ids: list[str] = [] + for peer_id, connection in list(self._connections.items()): + if peer_id in exclude_peer_ids: + continue + try: + await self._write_message(connection.writer, payload) + except Exception: + failed_peer_ids.append(peer_id) + + for peer_id in failed_peer_ids: + self._close_connection(peer_id) + + async def _start_mdns_discovery(self) -> None: + loop = asyncio.get_running_loop() + try: + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + if hasattr(socket, "SO_REUSEPORT"): + with contextlib.suppress(OSError): + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + sock.bind(("", self.config.mdns_port)) + + membership = struct.pack( + "=4s4s", + socket.inet_aton(self.config.mdns_group), + 
socket.inet_aton("0.0.0.0"), + ) + sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, membership) + sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1) + sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 1) + + protocol = _DiscoveryProtocol(self._on_discovery_packet) + transport, _ = await loop.create_datagram_endpoint( + lambda: protocol, + sock=sock, + ) + self._mdns_transport = transport + self._mdns_protocol = protocol + except OSError: + self._use_local_discovery = True + _LOCAL_DISCOVERY_REGISTRY.add(self) + + self._mdns_announce_task = asyncio.create_task(self._announce_loop()) + self._background_tasks.add(self._mdns_announce_task) + self._mdns_announce_task.add_done_callback(self._background_tasks.discard) + + async def _announce_loop(self) -> None: + while self._running and (self._mdns_transport is not None or self._use_local_discovery): + payload = { + "service": "minichain", + "node_id": self.node_id, + "host": self.advertise_host, + "port": self.listen_port, + } + encoded = json.dumps(payload, sort_keys=True, separators=(",", ":")).encode("utf-8") + if self._use_local_discovery: + for peer in list(_LOCAL_DISCOVERY_REGISTRY): + if peer is self: + continue + peer._on_discovery_packet(encoded, (self.listen_host, self.listen_port)) + elif self._mdns_transport is not None: + self._mdns_transport.sendto( + encoded, + (self.config.mdns_group, self.config.mdns_port), + ) + await asyncio.sleep(self.config.mdns_interval_seconds) + + def _on_discovery_packet(self, data: bytes, _addr: tuple[str, int]) -> None: + try: + payload = json.loads(data.decode("utf-8")) + except (UnicodeDecodeError, json.JSONDecodeError): + return + if not isinstance(payload, dict): + return + if payload.get("service") != "minichain": + return + + node_id = payload.get("node_id") + host = payload.get("host") + port = payload.get("port") + if not isinstance(node_id, str) or not isinstance(host, str) or not isinstance(port, int): + return + if node_id == 
self.node_id: + return + + peer = PeerAddress(host=host, port=port) + info = PeerInfo( + node_id=node_id, + address=peer, + discovered_via="mdns", + last_seen=int(time.time()), + ) + existing = self._known_peers.get(node_id) + if existing is None: + self._known_peers[node_id] = info + self._spawn(self.connect_to_peer(peer, discovered_via="mdns")) + + async def _reconnect_loop(self) -> None: + while self._running: + for peer in self._reconnect_candidates(): + if not self._running: + return + await self.connect_to_peer(peer, discovered_via="reconnect") + await asyncio.sleep(self.config.reconnect_interval_seconds) + + def _reconnect_candidates(self) -> tuple[PeerAddress, ...]: + seen: set[tuple[str, int]] = set() + ordered: list[PeerAddress] = [] + + for peer in self.config.bootstrap_peers: + key = (peer.host, peer.port) + if key in seen or self._is_self_peer(peer): + continue + seen.add(key) + ordered.append(peer) + + for info in self._known_peers.values(): + peer = info.address + key = (peer.host, peer.port) + if key in seen or self._is_self_peer(peer): + continue + seen.add(key) + ordered.append(peer) + + return tuple(ordered) + + def _spawn(self, coroutine: Coroutine[Any, Any, Any]) -> None: + task = asyncio.create_task(coroutine) + self._background_tasks.add(task) + task.add_done_callback(self._background_tasks.discard) + + def _is_self_peer(self, peer: PeerAddress) -> bool: + if peer.port != self.listen_port: + return False + known_self_hosts = { + self.listen_host, + self.advertise_host, + } + return peer.host in known_self_hosts + + async def _send_sync_status(self, writer: asyncio.StreamWriter) -> None: + payload = self._sync_status_payload(height=self._local_chain_height()) + await self._write_message(writer, payload) + + async def _send_tx_result( + self, + *, + peer_id: str, + transaction_id: str, + accepted: bool, + reason: str, + ) -> None: + connection = self._connections.get(peer_id) + if connection is None: + return + payload = 
self._tx_result_payload( + transaction_id=transaction_id, + accepted=accepted, + reason=reason, + ) + try: + await self._write_message(connection.writer, payload) + except Exception: + self._close_connection(peer_id) + + def _local_chain_height(self) -> int: + if self._sync_height_getter is None: + return 0 + try: + height = int(self._sync_height_getter()) + except Exception: + return 0 + return max(0, height) + + def _remember_seen_transaction(self, transaction_id: str) -> bool: + if transaction_id in self._seen_transactions: + return False + + self._seen_transactions.add(transaction_id) + self._seen_transaction_order.append(transaction_id) + while len(self._seen_transactions) > self.config.seen_tx_cache_size: + oldest = self._seen_transaction_order.popleft() + self._seen_transactions.discard(oldest) + return True + + def _accept_transaction(self, transaction: Transaction) -> bool: + if self._transaction_handler is None: + return True + try: + return bool(self._transaction_handler(transaction)) + except Exception: + return False + + def _remember_seen_block(self, block_hash: str) -> bool: + if block_hash in self._seen_blocks: + return False + + self._seen_blocks.add(block_hash) + self._seen_block_order.append(block_hash) + while len(self._seen_blocks) > self.config.seen_block_cache_size: + oldest = self._seen_block_order.popleft() + self._seen_blocks.discard(oldest) + return True + + def _accept_block(self, block: Block) -> bool: + if self._block_handler is None: + return True + try: + return bool(self._block_handler(block)) + except Exception: + return False + + def _hello_payload(self) -> dict[str, object]: + return { + "type": "hello", + "node_id": self.node_id, + "host": self.advertise_host, + "port": self.listen_port, + } + + def _peer_list_payload(self) -> dict[str, object]: + unique_peers = { + (peer.address.host, peer.address.port) + for peer in self._known_peers.values() + } + unique_peers.update((peer.host, peer.port) for peer in 
self.config.bootstrap_peers) + unique_peers.discard((self.listen_host, self.listen_port)) + unique_peers.discard((self.advertise_host, self.listen_port)) + peers = [{"host": host, "port": port} for host, port in sorted(unique_peers)] + return {"type": "peers", "peers": peers} + + def _sync_status_payload(self, *, height: int) -> dict[str, object]: + return { + "type": "sync_status", + "protocol": SYNC_PROTOCOL_ID, + "height": height, + } + + def _sync_request_payload(self, *, from_height: int, to_height: int) -> dict[str, object]: + return { + "type": "sync_request", + "protocol": SYNC_PROTOCOL_ID, + "from_height": from_height, + "to_height": to_height, + } + + def _sync_blocks_payload(self, *, start_height: int, blocks: list[Block]) -> dict[str, object]: + return { + "type": "sync_blocks", + "protocol": SYNC_PROTOCOL_ID, + "start_height": start_height, + "blocks": [self._encode_block(block) for block in blocks], + } + + def _transaction_payload(self, transaction: Transaction, *, tx_id: str) -> dict[str, object]: + return { + "type": "tx_gossip", + "protocol": TX_GOSSIP_PROTOCOL_ID, + "transaction_id": tx_id, + "transaction": asdict(transaction), + } + + def _tx_result_payload( + self, + *, + transaction_id: str, + accepted: bool, + reason: str, + ) -> dict[str, object]: + return { + "type": "tx_result", + "transaction_id": transaction_id, + "accepted": accepted, + "reason": reason, + } + + def _transaction_from_payload(self, payload: dict[str, object]) -> Transaction: + try: + return Transaction(**payload) + except TypeError as exc: + raise NetworkError("invalid transaction payload shape") from exc + + def _block_payload(self, block: Block, *, block_hash: str) -> dict[str, object]: + return { + "type": "block_gossip", + "protocol": BLOCK_GOSSIP_PROTOCOL_ID, + "block_hash": block_hash, + "block": self._encode_block(block), + } + + @staticmethod + def _encode_block(block: Block) -> dict[str, object]: + return { + "header": asdict(block.header), + "transactions": 
[asdict(transaction) for transaction in block.transactions], + } + + def _block_from_payload(self, payload: dict[str, object]) -> Block: + header_payload = payload.get("header") + transactions_payload = payload.get("transactions") + if not isinstance(header_payload, dict): + raise NetworkError("block header payload must be an object") + if not isinstance(transactions_payload, list): + raise NetworkError("block transactions payload must be a list") + + try: + header = BlockHeader(**header_payload) + except TypeError as exc: + raise NetworkError("invalid block header payload shape") from exc + + transactions: list[Transaction] = [] + for transaction_payload in transactions_payload: + if not isinstance(transaction_payload, dict): + raise NetworkError("transaction entry must be an object") + try: + transactions.append(Transaction(**transaction_payload)) + except TypeError as exc: + raise NetworkError("invalid block transaction payload shape") from exc + return Block(header=header, transactions=transactions) + + def _peer_from_hello( + self, + *, + message: dict[str, object], + fallback_host: str, + discovered_via: str, + ) -> PeerInfo: + if message.get("type") != "hello": + raise NetworkError("handshake message must be type=hello") + node_id = message.get("node_id") + host = message.get("host") + port = message.get("port") + if not isinstance(node_id, str): + raise NetworkError("handshake node_id must be a string") + if not isinstance(port, int): + raise NetworkError("handshake port must be an integer") + if not isinstance(host, str) or not host: + host = fallback_host + address = PeerAddress(host=host, port=port) + address.validate() + return PeerInfo( + node_id=node_id, + address=address, + discovered_via=discovered_via, + last_seen=int(time.time()), + ) + + async def _read_message( + self, + reader: asyncio.StreamReader, + *, + eof_ok: bool = False, + ) -> dict[str, object] | None: + line = await asyncio.wait_for( + reader.readline(), + 
timeout=self.config.connect_timeout_seconds, + ) + if not line: + if eof_ok: + return None + raise NetworkError("unexpected EOF while reading peer message") + + try: + payload = json.loads(line.decode("utf-8")) + except (UnicodeDecodeError, json.JSONDecodeError) as exc: + raise NetworkError("received malformed JSON message") from exc + if not isinstance(payload, dict): + raise NetworkError("message payload must be an object") + return payload + + async def _write_message( + self, + writer: asyncio.StreamWriter, + payload: dict[str, object], + ) -> None: + body = json.dumps(payload, sort_keys=True, separators=(",", ":")).encode("utf-8") + writer.write(body + b"\n") + await writer.drain() diff --git a/src/minichain/node.py b/src/minichain/node.py new file mode 100644 index 0000000..fceb35b --- /dev/null +++ b/src/minichain/node.py @@ -0,0 +1,271 @@ +"""Node orchestration layer for MiniChain.""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from pathlib import Path + +from minichain.block import Block +from minichain.chain import ChainConfig, ChainManager, ChainValidationError +from minichain.genesis import GenesisConfig, create_genesis_state +from minichain.mempool import Mempool, MempoolValidationError +from minichain.mining import build_candidate_block, mine_candidate_block +from minichain.state import State +from minichain.storage import SQLiteStorage, StorageError +from minichain.transaction import ADDRESS_HEX_LENGTH, Transaction + + +class NodeError(ValueError): + """Raised when node lifecycle or orchestration operations fail.""" + + +@dataclass(frozen=True) +class NodeConfig: + """Runtime configuration for a MiniChain node.""" + + data_dir: Path | str + database_filename: str = "chain.sqlite3" + miner_address: str | None = None + max_block_transactions: int = 1_000 + mempool_max_size: int = 1_000 + mempool_max_age_seconds: int = 3_600 + genesis_config: GenesisConfig = field(default_factory=GenesisConfig) + chain_config: 
ChainConfig = field(default_factory=ChainConfig) + + def validate(self) -> None: + if self.max_block_transactions < 0: + raise NodeError("max_block_transactions must be non-negative") + if self.mempool_max_size <= 0: + raise NodeError("mempool_max_size must be positive") + if self.mempool_max_age_seconds <= 0: + raise NodeError("mempool_max_age_seconds must be positive") + if self.miner_address is not None and not _is_lower_hex( + self.miner_address, ADDRESS_HEX_LENGTH + ): + raise NodeError("miner_address must be a 20-byte lowercase hex string") + self.genesis_config.validate() + self.chain_config.validate() + + +class MiniChainNode: + """Top-level node that coordinates chain, mempool, mining, and storage.""" + + def __init__(self, config: NodeConfig) -> None: + self.config = config + self.config.validate() + + self._storage: SQLiteStorage | None = None + self._chain_manager: ChainManager | None = None + self._mempool: Mempool | None = None + self._running = False + + @property + def running(self) -> bool: + return self._running + + @property + def chain_manager(self) -> ChainManager: + if self._chain_manager is None: + raise NodeError("Node is not started") + return self._chain_manager + + @property + def mempool(self) -> Mempool: + if self._mempool is None: + raise NodeError("Node is not started") + return self._mempool + + @property + def storage(self) -> SQLiteStorage: + if self._storage is None: + raise NodeError("Node is not started") + return self._storage + + @property + def height(self) -> int: + return self.chain_manager.height + + @property + def tip_hash(self) -> str: + return self.chain_manager.tip_hash + + def start(self) -> None: + """Start node components and load or initialize persistent chain state.""" + if self._running: + return + + data_dir = Path(self.config.data_dir) + data_dir.mkdir(parents=True, exist_ok=True) + db_path = data_dir / self.config.database_filename + + storage = SQLiteStorage(db_path) + chain_manager = 
self._initialize_chain_manager(storage) + mempool = Mempool( + max_size=self.config.mempool_max_size, + max_age_seconds=self.config.mempool_max_age_seconds, + ) + + self._storage = storage + self._chain_manager = chain_manager + self._mempool = mempool + self._running = True + + def stop(self) -> None: + """Stop node components and close persistent resources.""" + if not self._running: + return + try: + if self._storage is not None: + self._storage.close() + finally: + self._storage = None + self._chain_manager = None + self._mempool = None + self._running = False + + def submit_transaction(self, transaction: Transaction) -> str: + """Validate and enqueue a transaction into the mempool.""" + self._require_started() + try: + return self.mempool.add_transaction(transaction, self.chain_manager.state) + except MempoolValidationError as exc: + raise NodeError(f"Transaction rejected by mempool: {exc}") from exc + + def accept_block(self, block: Block) -> str: + """Validate and apply a block; persist state on canonical updates.""" + self._require_started() + try: + result = self.chain_manager.add_block(block) + except ChainValidationError as exc: + raise NodeError(f"Block rejected: {exc}") from exc + + if result in {"extended", "reorg"}: + self.mempool.remove_confirmed_transactions(block.transactions, self.chain_manager.state) + self._persist_head() + + return result + + def mine_one_block( + self, + *, + timestamp: int | None = None, + max_nonce: int = (1 << 64) - 1, + max_transactions: int | None = None, + ) -> Block: + """Build, mine, and apply one block on top of the canonical tip.""" + self._require_started() + miner_address = self.config.miner_address + if miner_address is None: + raise NodeError("miner_address must be configured to mine blocks") + + limit = ( + self.config.max_block_transactions + if max_transactions is None + else max_transactions + ) + candidate = build_candidate_block( + chain_manager=self.chain_manager, + mempool=self.mempool, + 
miner_address=miner_address, + max_transactions=limit, + timestamp=timestamp, + ) + mined_block, _digest = mine_candidate_block(block_template=candidate, max_nonce=max_nonce) + result = self.accept_block(mined_block) + if result not in {"extended", "reorg"}: + raise NodeError(f"Mined block was not canonicalized: {result}") + return mined_block + + def _persist_head(self) -> None: + try: + self.storage.persist_block_state_and_metadata( + block=self.chain_manager.tip_block, + state=self.chain_manager.state, + height=self.chain_manager.height, + head_hash=self.chain_manager.tip_hash, + ) + except StorageError as exc: + raise NodeError(f"Failed to persist canonical head: {exc}") from exc + + def _initialize_chain_manager(self, storage: SQLiteStorage) -> ChainManager: + metadata = storage.load_chain_metadata() + genesis_block, genesis_state = create_genesis_state(self.config.genesis_config) + manager = ChainManager( + genesis_block=genesis_block, + genesis_state=genesis_state, + config=self.config.chain_config, + ) + + if metadata is None: + storage.persist_block_state_and_metadata( + block=manager.tip_block, + state=manager.state, + height=0, + head_hash=manager.tip_hash, + ) + return manager + + stored_genesis = storage.get_block_by_height(0) + if stored_genesis is None: + raise NodeError("Storage metadata exists but genesis block is missing") + if stored_genesis.hash().hex() != manager.tip_hash: + raise NodeError("Stored genesis does not match configured genesis") + + target_height = int(metadata["height"]) + for height in range(1, target_height + 1): + block = storage.get_block_by_height(height) + if block is None: + raise NodeError(f"Missing persisted block at height {height}") + result = manager.add_block(block) + if result not in {"extended", "reorg"}: + raise NodeError( + f"Unexpected replay result at height {height}: {result}" + ) + + expected_head_hash = str(metadata["head_hash"]) + if manager.tip_hash != expected_head_hash: + raise NodeError( + "Persisted 
head hash mismatch: " + f"expected {expected_head_hash}, got {manager.tip_hash}" + ) + + persisted_state = storage.load_state() + if not _states_equal(persisted_state, manager.state): + raise NodeError("Persisted state does not match replayed canonical state") + + return manager + + def _require_started(self) -> None: + if not self._running: + raise NodeError("Node is not started") + + +def start_node(host: str, port: int) -> None: + """Start a MiniChain node with local defaults and print its status.""" + data_dir = Path(".minichain") + default_config = NodeConfig(data_dir=data_dir) + node = MiniChainNode(default_config) + node.start() + try: + print(f"MiniChain node started on {host}:{port}") + print(f"chain_height={node.height} tip={node.tip_hash}") + finally: + node.stop() + + +def _is_lower_hex(value: str, expected_length: int) -> bool: + if len(value) != expected_length: + return False + return all(ch in "0123456789abcdef" for ch in value) + + +def _states_equal(left: State, right: State) -> bool: + left_accounts = { + address: (account.balance, account.nonce) + for address, account in left.accounts.items() + } + right_accounts = { + address: (account.balance, account.nonce) + for address, account in right.accounts.items() + } + return left_accounts == right_accounts diff --git a/src/minichain/serialization.py b/src/minichain/serialization.py new file mode 100644 index 0000000..a9d91cd --- /dev/null +++ b/src/minichain/serialization.py @@ -0,0 +1,66 @@ +"""Deterministic serialization helpers for consensus-critical data.""" + +from __future__ import annotations + +import json +from typing import Any, Mapping + +TRANSACTION_FIELD_ORDER = ( + "sender", + "recipient", + "amount", + "nonce", + "fee", + "timestamp", +) + +BLOCK_HEADER_FIELD_ORDER = ( + "version", + "previous_hash", + "merkle_root", + "timestamp", + "difficulty_target", + "nonce", + "block_height", +) + + +def _to_field_map( + value: Mapping[str, Any] | object, field_order: tuple[str, ...] 
TRANSACTION_FIELD_ORDER = (
    "sender",
    "recipient",
    "amount",
    "nonce",
    "fee",
    "timestamp",
)

BLOCK_HEADER_FIELD_ORDER = (
    "version",
    "previous_hash",
    "merkle_root",
    "timestamp",
    "difficulty_target",
    "nonce",
    "block_height",
)


def _to_field_map(
    value: Mapping[str, Any] | object, field_order: tuple[str, ...]
) -> dict[str, Any]:
    """Project *value* onto exactly the fields in *field_order*.

    Accepts either a mapping or an object with matching attributes.  Raises
    ValueError when required fields are absent or unknown fields are present.
    """
    if isinstance(value, Mapping):
        source = dict(value)
    else:
        source = {
            field: getattr(value, field)
            for field in field_order
            if hasattr(value, field)
        }

    missing = [field for field in field_order if field not in source]
    if missing:
        raise ValueError(f"Missing required fields: {', '.join(missing)}")

    extras = sorted(set(source) - set(field_order))
    if extras:
        raise ValueError(f"Unexpected fields: {', '.join(extras)}")

    return {field: source[field] for field in field_order}


def serialize_canonical(value: Mapping[str, Any] | object, field_order: tuple[str, ...]) -> bytes:
    """Serialize a structure to canonical UTF-8 JSON bytes.

    NOTE(review): json.dumps is called with sort_keys=True, so the emitted
    byte order is alphabetical regardless of *field_order*; the field order
    only governs presence/absence validation.  Confirm this is intended.
    """
    canonical = _to_field_map(value, field_order)
    return json.dumps(
        canonical,
        ensure_ascii=False,
        sort_keys=True,
        separators=(",", ":"),
    ).encode("utf-8")


def serialize_transaction(value: Mapping[str, Any] | object) -> bytes:
    """Serialize a transaction using the canonical transaction field order."""
    return serialize_canonical(value, TRANSACTION_FIELD_ORDER)


def serialize_block_header(value: Mapping[str, Any] | object) -> bytes:
    """Serialize a block header using the canonical block header field order."""
    return serialize_canonical(value, BLOCK_HEADER_FIELD_ORDER)
__init__(self) -> None: + self.accounts: dict[str, Account] = {} + + def copy(self) -> State: + snapshot = State() + snapshot.accounts = { + address: Account(balance=account.balance, nonce=account.nonce) + for address, account in self.accounts.items() + } + return snapshot + + def set_account(self, address: str, account: Account) -> None: + self.accounts[address] = account + + def get_account(self, address: str) -> Account: + if address not in self.accounts: + self.accounts[address] = Account() + return self.accounts[address] + + def apply_transaction(self, transaction: Transaction) -> None: + if transaction.is_coinbase(): + raise StateTransitionError( + "Coinbase transaction must be applied through apply_block" + ) + if not transaction.verify(): + raise StateTransitionError("Transaction signature/identity verification failed") + + sender = self.get_account(transaction.sender) + recipient = self.get_account(transaction.recipient) + + if sender.nonce != transaction.nonce: + raise StateTransitionError( + f"Nonce mismatch for sender {transaction.sender}: " + f"expected {sender.nonce}, got {transaction.nonce}" + ) + + total_cost = transaction.amount + transaction.fee + if sender.balance < total_cost: + raise StateTransitionError( + f"Insufficient balance for sender {transaction.sender}: " + f"required {total_cost}, available {sender.balance}" + ) + + sender.balance -= total_cost + sender.nonce += 1 + recipient.balance += transaction.amount + + def apply_coinbase_transaction(self, transaction: Transaction) -> None: + if not transaction.is_coinbase(): + raise StateTransitionError("Invalid coinbase transaction") + miner = self.get_account(transaction.recipient) + miner.balance += transaction.amount + + def apply_block(self, block: Block, *, block_reward: int = 0) -> None: + try: + block.validate_coinbase(block_reward=block_reward) + except BlockValidationError as exc: + raise StateTransitionError(f"Block validation failed: {exc}") from exc + + snapshot = self.copy() + 
try: + self.apply_coinbase_transaction(block.transactions[0]) + for transaction in block.transactions[1:]: + self.apply_transaction(transaction) + except StateTransitionError as exc: + self.accounts = snapshot.accounts + raise StateTransitionError(f"Block application failed: {exc}") from exc + + +def apply_transaction(state: State, transaction: Transaction) -> None: + """Apply a transaction to state with validation.""" + state.apply_transaction(transaction) + + +def apply_block(state: State, block: Block, *, block_reward: int = 0) -> None: + """Apply all block transactions atomically, rolling back on failure.""" + state.apply_block(block, block_reward=block_reward) diff --git a/src/minichain/storage.py b/src/minichain/storage.py new file mode 100644 index 0000000..ffd1aab --- /dev/null +++ b/src/minichain/storage.py @@ -0,0 +1,280 @@ +"""Persistent storage integration using SQLite.""" + +from __future__ import annotations + +import json +import sqlite3 +from dataclasses import asdict +from pathlib import Path + +from minichain.block import Block, BlockHeader +from minichain.state import Account, State +from minichain.transaction import Transaction + + +class StorageError(ValueError): + """Raised when persistence operations fail validation or constraints.""" + + +def _is_lower_hex(value: str, expected_length: int) -> bool: + if len(value) != expected_length: + return False + return all(ch in "0123456789abcdef" for ch in value) + + +class SQLiteStorage: + """SQLite-backed block/state persistence.""" + + def __init__(self, db_path: str | Path) -> None: + self.db_path = str(db_path) + self._connection = sqlite3.connect(self.db_path) + self._connection.execute("PRAGMA foreign_keys = ON") + self._initialize_schema() + + def close(self) -> None: + self._connection.close() + + def __enter__(self) -> SQLiteStorage: + return self + + def __exit__(self, _exc_type: object, _exc: object, _tb: object) -> None: + self.close() + + def _initialize_schema(self) -> None: + 
self._connection.executescript( + """ + CREATE TABLE IF NOT EXISTS blocks ( + hash TEXT PRIMARY KEY, + height INTEGER NOT NULL UNIQUE, + version INTEGER NOT NULL, + previous_hash TEXT NOT NULL, + merkle_root TEXT NOT NULL, + timestamp INTEGER NOT NULL, + difficulty_target TEXT NOT NULL, + nonce INTEGER NOT NULL, + transactions_json TEXT NOT NULL + ); + + CREATE TABLE IF NOT EXISTS accounts ( + address TEXT PRIMARY KEY, + balance INTEGER NOT NULL, + nonce INTEGER NOT NULL + ); + + CREATE TABLE IF NOT EXISTS chain_metadata ( + key TEXT PRIMARY KEY, + value TEXT NOT NULL + ); + """ + ) + self._connection.commit() + + def store_block(self, block: Block, *, connection: sqlite3.Connection | None = None) -> None: + """Persist a block by hash and height.""" + if connection is None: + with self._connection: + self.store_block(block, connection=self._connection) + return + + if not block.has_valid_merkle_root(): + raise StorageError("Block merkle_root does not match transactions") + + block_hash = block.hash().hex() + transactions_json = json.dumps( + [asdict(transaction) for transaction in block.transactions], + sort_keys=True, + separators=(",", ":"), + ) + conn = connection + + try: + conn.execute( + """ + INSERT INTO blocks ( + hash, height, version, previous_hash, merkle_root, + timestamp, difficulty_target, nonce, transactions_json + ) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ """, + ( + block_hash, + block.header.block_height, + block.header.version, + block.header.previous_hash, + block.header.merkle_root, + block.header.timestamp, + str(block.header.difficulty_target), + block.header.nonce, + transactions_json, + ), + ) + except sqlite3.IntegrityError as exc: + raise StorageError( + f"Block already exists or violates constraints: {block_hash}" + ) from exc + + def get_block_by_hash(self, block_hash: str) -> Block | None: + """Load a block by hash.""" + row = self._connection.execute( + """ + SELECT + height, version, previous_hash, merkle_root, timestamp, + difficulty_target, nonce, transactions_json + FROM blocks + WHERE hash = ? + """, + (block_hash,), + ).fetchone() + if row is None: + return None + return self._row_to_block(row) + + def get_block_by_height(self, height: int) -> Block | None: + """Load a block by canonical height.""" + row = self._connection.execute( + """ + SELECT + height, version, previous_hash, merkle_root, timestamp, + difficulty_target, nonce, transactions_json + FROM blocks + WHERE height = ? 
+ """, + (height,), + ).fetchone() + if row is None: + return None + return self._row_to_block(row) + + def save_state(self, state: State, *, connection: sqlite3.Connection | None = None) -> None: + """Persist all accounts as the current canonical state snapshot.""" + if connection is None: + with self._connection: + self.save_state(state, connection=self._connection) + return + + conn = connection + + for address, account in state.accounts.items(): + if not _is_lower_hex(address, 40): + raise StorageError(f"Invalid account address: {address}") + if account.balance < 0 or account.nonce < 0: + raise StorageError(f"Invalid account values for {address}") + + conn.execute("DELETE FROM accounts") + rows = [ + (address, account.balance, account.nonce) + for address, account in sorted(state.accounts.items()) + ] + conn.executemany( + "INSERT INTO accounts (address, balance, nonce) VALUES (?, ?, ?)", + rows, + ) + + def load_state(self) -> State: + """Load the latest persisted account snapshot.""" + state = State() + rows = self._connection.execute( + "SELECT address, balance, nonce FROM accounts ORDER BY address" + ).fetchall() + for address, balance, nonce in rows: + state.set_account(address, Account(balance=balance, nonce=nonce)) + return state + + def save_chain_metadata( + self, + *, + height: int, + head_hash: str, + connection: sqlite3.Connection | None = None, + ) -> None: + """Persist canonical chain metadata.""" + if connection is None: + with self._connection: + self.save_chain_metadata( + height=height, + head_hash=head_hash, + connection=self._connection, + ) + return + + if height < 0: + raise StorageError("height must be non-negative") + if not _is_lower_hex(head_hash, 64): + raise StorageError("head_hash must be a 32-byte lowercase hex string") + + conn = connection + conn.execute( + """ + INSERT INTO chain_metadata (key, value) VALUES ('height', ?) 
+ ON CONFLICT(key) DO UPDATE SET value = excluded.value + """, + (str(height),), + ) + conn.execute( + """ + INSERT INTO chain_metadata (key, value) VALUES ('head_hash', ?) + ON CONFLICT(key) DO UPDATE SET value = excluded.value + """, + (head_hash,), + ) + + def load_chain_metadata(self) -> dict[str, int | str] | None: + """Load canonical chain metadata (height and head hash).""" + rows = self._connection.execute( + "SELECT key, value FROM chain_metadata WHERE key IN ('height', 'head_hash')" + ).fetchall() + if not rows: + return None + kv = {key: value for key, value in rows} + if "height" not in kv or "head_hash" not in kv: + raise StorageError("Incomplete chain metadata in storage") + return {"height": int(kv["height"]), "head_hash": kv["head_hash"]} + + def persist_block_state_and_metadata( + self, + *, + block: Block, + state: State, + height: int | None = None, + head_hash: str | None = None, + ) -> None: + """Atomically persist block, state snapshot, and metadata.""" + resolved_height = block.header.block_height if height is None else height + resolved_head_hash = block.hash().hex() if head_hash is None else head_hash + + with self._connection: + self.store_block(block, connection=self._connection) + self.save_state(state, connection=self._connection) + self.save_chain_metadata( + height=resolved_height, + head_hash=resolved_head_hash, + connection=self._connection, + ) + + @staticmethod + def _row_to_block(row: sqlite3.Row | tuple[object, ...]) -> Block: + ( + height, + version, + previous_hash, + merkle_root, + timestamp, + difficulty_target, + nonce, + transactions_json, + ) = row + header = BlockHeader( + version=int(version), + previous_hash=str(previous_hash), + merkle_root=str(merkle_root), + timestamp=int(timestamp), + difficulty_target=int(difficulty_target), + nonce=int(nonce), + block_height=int(height), + ) + transaction_dicts = json.loads(str(transactions_json)) + transactions = [Transaction(**tx) for tx in transaction_dicts] + block = 
Block(header=header, transactions=transactions) + if not block.has_valid_merkle_root(): + raise StorageError("Corrupt block data: merkle_root mismatch") + return block diff --git a/src/minichain/transaction.py b/src/minichain/transaction.py new file mode 100644 index 0000000..758a659 --- /dev/null +++ b/src/minichain/transaction.py @@ -0,0 +1,144 @@ +"""Transaction data structures and validation rules.""" + +from __future__ import annotations + +from dataclasses import dataclass + +from minichain.crypto import ( + blake2b_digest, + derive_address, + deserialize_verify_key, + serialize_verify_key, + sign_message, + verify_signature, +) +from minichain.serialization import serialize_transaction + +ADDRESS_HEX_LENGTH = 40 +PUBLIC_KEY_HEX_LENGTH = 64 +SIGNATURE_HEX_LENGTH = 128 +COINBASE_SENDER = "00" * 20 + + +def _is_lower_hex(value: str, expected_length: int) -> bool: + if len(value) != expected_length: + return False + return all(ch in "0123456789abcdef" for ch in value) + + +@dataclass +class Transaction: + """A signed account-transfer transaction.""" + + sender: str + recipient: str + amount: int + nonce: int + fee: int + timestamp: int + signature: str = "" + public_key: str = "" + + def is_coinbase(self) -> bool: + """Return whether this transaction follows coinbase conventions.""" + if not _is_lower_hex(self.recipient, ADDRESS_HEX_LENGTH): + return False + if not isinstance(self.amount, int) or self.amount <= 0: + return False + if not isinstance(self.timestamp, int) or self.timestamp < 0: + return False + return ( + self.sender == COINBASE_SENDER + and self.nonce == 0 + and self.fee == 0 + and self.signature == "" + and self.public_key == "" + ) + + def signing_payload(self) -> dict[str, int | str]: + """Return the canonical transaction payload that is signed.""" + return { + "sender": self.sender, + "recipient": self.recipient, + "amount": self.amount, + "nonce": self.nonce, + "fee": self.fee, + "timestamp": self.timestamp, + } + + def signing_bytes(self) -> 
bytes: + """Return canonical bytes for signature generation/verification.""" + return serialize_transaction(self.signing_payload()) + + def transaction_id(self) -> bytes: + """Return a deterministic transaction hash for Merkle commitments.""" + digest_input = bytearray(self.signing_bytes()) + if self.signature: + digest_input.extend(bytes.fromhex(self.signature)) + if self.public_key: + digest_input.extend(bytes.fromhex(self.public_key)) + return blake2b_digest(bytes(digest_input)) + + def _validate_common_fields(self) -> bool: + if not _is_lower_hex(self.sender, ADDRESS_HEX_LENGTH): + return False + if not _is_lower_hex(self.recipient, ADDRESS_HEX_LENGTH): + return False + if not isinstance(self.amount, int) or self.amount < 0: + return False + if not isinstance(self.nonce, int) or self.nonce < 0: + return False + if not isinstance(self.fee, int) or self.fee < 0: + return False + if not isinstance(self.timestamp, int) or self.timestamp < 0: + return False + return True + + def sign(self, signing_key: object) -> None: + """Sign this transaction in-place and populate auth fields.""" + if not self._validate_common_fields(): + raise ValueError("Invalid transaction fields") + verify_key = signing_key.verify_key + self.public_key = serialize_verify_key(verify_key) + self.signature = sign_message(self.signing_bytes(), signing_key).hex() + + def verify(self) -> bool: + """Verify transaction structure, signer identity, and signature.""" + if self.is_coinbase(): + return True + if not self._validate_common_fields(): + return False + if not _is_lower_hex(self.public_key, PUBLIC_KEY_HEX_LENGTH): + return False + if not _is_lower_hex(self.signature, SIGNATURE_HEX_LENGTH): + return False + + try: + verify_key = deserialize_verify_key(self.public_key) + except Exception: + return False + + if derive_address(verify_key) != self.sender: + return False + signature_bytes = bytes.fromhex(self.signature) + return verify_signature(self.signing_bytes(), signature_bytes, verify_key) + + 
+def create_coinbase_transaction( + *, + miner_address: str, + amount: int, + timestamp: int, +) -> Transaction: + """Build a canonical coinbase transaction.""" + coinbase = Transaction( + sender=COINBASE_SENDER, + recipient=miner_address, + amount=amount, + nonce=0, + fee=0, + timestamp=timestamp, + ) + if not coinbase.is_coinbase(): + raise ValueError("Invalid coinbase transaction fields") + return coinbase diff --git a/tests/test_block.py b/tests/test_block.py new file mode 100644 index 0000000..0be3783 --- /dev/null +++ b/tests/test_block.py @@ -0,0 +1,118 @@ +"""Unit tests for block hashing and transaction commitments.""" + +from __future__ import annotations + +from dataclasses import replace + +import pytest + +pytest.importorskip("nacl") + +from minichain.block import Block, BlockHeader, BlockValidationError +from minichain.crypto import derive_address, generate_key_pair +from minichain.transaction import Transaction, create_coinbase_transaction + + +def _make_signed_transaction(amount: int, nonce: int) -> Transaction: + signing_key, verify_key = generate_key_pair() + tx = Transaction( + sender=derive_address(verify_key), + recipient="ab" * 20, + amount=amount, + nonce=nonce, + fee=1, + timestamp=1_739_800_000 + nonce, + ) + tx.sign(signing_key) + return tx + + +def _make_block() -> Block: + transactions = [ + _make_signed_transaction(amount=10, nonce=0), + _make_signed_transaction(amount=11, nonce=1), + ] + header = BlockHeader( + version=0, + previous_hash="00" * 32, + merkle_root="", + timestamp=1_739_800_111, + difficulty_target=1_000_000, + nonce=7, + block_height=1, + ) + block = Block(header=header, transactions=transactions) + block.update_header_merkle_root() + return block + + +def _make_block_with_coinbase(*, block_reward: int = 50) -> Block: + miner_key, miner_verify = generate_key_pair() + _ = miner_key + regular_transactions = [ + _make_signed_transaction(amount=10, nonce=0), + _make_signed_transaction(amount=11, nonce=1), + ] + coinbase = 
create_coinbase_transaction( + miner_address=derive_address(miner_verify), + amount=block_reward + sum(tx.fee for tx in regular_transactions), + timestamp=1_739_800_111, + ) + header = BlockHeader( + version=0, + previous_hash="00" * 32, + merkle_root="", + timestamp=1_739_800_111, + difficulty_target=1_000_000, + nonce=7, + block_height=1, + ) + block = Block(header=header, transactions=[coinbase, *regular_transactions]) + block.update_header_merkle_root() + return block + + +def test_block_hash_is_deterministic() -> None: + block = _make_block() + assert block.hash() == block.hash() + + +@pytest.mark.parametrize( + ("field", "value"), + [ + ("version", 1), + ("previous_hash", "11" * 32), + ("merkle_root", "22" * 32), + ("timestamp", 1_739_800_222), + ("difficulty_target", 2_000_000), + ("nonce", 8), + ("block_height", 2), + ], +) +def test_changing_header_field_changes_hash(field: str, value: int | str) -> None: + block = _make_block() + mutated_header = replace(block.header, **{field: value}) + + assert block.header.hash() != mutated_header.hash() + + +def test_header_merkle_root_matches_transaction_body() -> None: + block = _make_block() + assert block.has_valid_merkle_root() + + block.transactions[0].amount += 1 + assert not block.has_valid_merkle_root() + + +def test_validate_coinbase_accepts_correct_amount() -> None: + block = _make_block_with_coinbase(block_reward=50) + block.validate_coinbase(block_reward=50) + + +def test_validate_coinbase_rejects_wrong_amount() -> None: + block = _make_block_with_coinbase(block_reward=50) + block.transactions[0].amount += 1 + block.update_header_merkle_root() + + with pytest.raises(BlockValidationError, match="Invalid coinbase amount"): + block.validate_coinbase(block_reward=50) diff --git a/tests/test_block_gossip.py b/tests/test_block_gossip.py new file mode 100644 index 0000000..cd12915 --- /dev/null +++ b/tests/test_block_gossip.py @@ -0,0 +1,144 @@ +"""Integration tests for block propagation across peers.""" + +from 
__future__ import annotations + +import asyncio + +import pytest + +pytest.importorskip("nacl") + +from minichain.chain import ChainConfig, ChainManager, ChainValidationError +from minichain.consensus import MAX_TARGET +from minichain.genesis import GenesisConfig, create_genesis_state +from minichain.mempool import Mempool +from minichain.mining import build_candidate_block, mine_candidate_block +from minichain.network import MiniChainNetwork, NetworkConfig + + +def test_block_gossip_propagates_and_applies_on_three_nodes() -> None: + async def scenario() -> None: + manager_a = _build_manager() + manager_b = _build_manager() + manager_c = _build_manager() + + accepted_hashes: dict[str, list[str]] = {"a": [], "b": [], "c": []} + + def make_block_handler(manager: ChainManager, node_name: str): + def handler(block) -> bool: + try: + result = manager.add_block(block) + except ChainValidationError: + return False + if result in {"extended", "reorg"}: + accepted_hashes[node_name].append(block.hash().hex()) + return True + return False + + return handler + + node_b = MiniChainNetwork( + NetworkConfig(host="127.0.0.1", port=0, node_id="node-block-b", enable_mdns=False) + ) + node_b.set_block_handler(make_block_handler(manager_b, "b")) + await node_b.start() + + node_a = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-block-a", + enable_mdns=False, + bootstrap_peers=(node_b.listen_address(),), + ) + ) + node_a.set_block_handler(make_block_handler(manager_a, "a")) + await node_a.start() + + node_c = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-block-c", + enable_mdns=False, + bootstrap_peers=(node_b.listen_address(),), + ) + ) + node_c.set_block_handler(make_block_handler(manager_c, "c")) + await node_c.start() + + try: + await node_a.wait_for_connected_peers(1, timeout=3.0) + await node_b.wait_for_connected_peers(2, timeout=3.0) + await node_c.wait_for_connected_peers(1, timeout=3.0) + + candidate = 
build_candidate_block( + chain_manager=manager_a, + mempool=Mempool(), + miner_address="11" * 20, + max_transactions=0, + timestamp=1_739_000_030, + ) + mined_block, _digest = mine_candidate_block( + block_template=candidate, + max_nonce=100_000, + ) + + assert await node_a.submit_block(mined_block) + + await _wait_until( + lambda: manager_b.height == 1 and manager_c.height == 1, + timeout=3.0, + ) + + expected_tip = mined_block.hash().hex() + assert manager_a.height == 1 + assert manager_b.height == 1 + assert manager_c.height == 1 + assert manager_a.tip_hash == expected_tip + assert manager_b.tip_hash == expected_tip + assert manager_c.tip_hash == expected_tip + + assert len(accepted_hashes["a"]) == 1 + assert len(accepted_hashes["b"]) == 1 + assert len(accepted_hashes["c"]) == 1 + + assert not await node_a.submit_block(mined_block) + await asyncio.sleep(0.2) + assert len(accepted_hashes["a"]) == 1 + assert len(accepted_hashes["b"]) == 1 + assert len(accepted_hashes["c"]) == 1 + finally: + await node_c.stop() + await node_a.stop() + await node_b.stop() + + asyncio.run(scenario()) + + +def _build_manager() -> ChainManager: + genesis_block, genesis_state = create_genesis_state( + GenesisConfig( + initial_balances={}, + timestamp=1_739_000_000, + difficulty_target=MAX_TARGET, + ) + ) + return ChainManager( + genesis_block=genesis_block, + genesis_state=genesis_state, + config=ChainConfig( + block_reward=50, + difficulty_adjustment_interval=10, + target_block_time_seconds=30, + ), + ) + + +async def _wait_until(predicate, *, timeout: float) -> None: + deadline = asyncio.get_running_loop().time() + timeout + while asyncio.get_running_loop().time() < deadline: + if predicate(): + return + await asyncio.sleep(0.05) + raise TimeoutError("condition was not met before timeout") diff --git a/tests/test_chain.py b/tests/test_chain.py new file mode 100644 index 0000000..0b50f5c --- /dev/null +++ b/tests/test_chain.py @@ -0,0 +1,196 @@ +"""Unit tests for chain management and 
fork resolution.""" + +from __future__ import annotations + +import pytest + +pytest.importorskip("nacl") + +from minichain.block import Block, BlockHeader +from minichain.chain import ChainConfig, ChainManager, ChainValidationError +from minichain.consensus import MAX_TARGET +from minichain.genesis import GenesisConfig, create_genesis_state +from minichain.transaction import create_coinbase_transaction + + +def _build_manager(*, block_reward: int = 50) -> ChainManager: + genesis_block, genesis_state = create_genesis_state( + GenesisConfig( + initial_balances={}, + timestamp=1_739_000_000, + difficulty_target=MAX_TARGET, + ) + ) + return ChainManager( + genesis_block=genesis_block, + genesis_state=genesis_state, + config=ChainConfig( + block_reward=block_reward, + difficulty_adjustment_interval=10, + target_block_time_seconds=30, + ), + ) + + +def _coinbase_block( + manager: ChainManager, + *, + parent: Block, + miner_address: str, + timestamp: int, + coinbase_amount: int | None = None, + difficulty_target: int | None = None, +) -> Block: + reward_amount = manager.config.block_reward if coinbase_amount is None else coinbase_amount + target = ( + manager.expected_next_difficulty(parent_hash=parent.hash().hex()) + if difficulty_target is None + else difficulty_target + ) + coinbase = create_coinbase_transaction( + miner_address=miner_address, + amount=reward_amount, + timestamp=timestamp, + ) + header = BlockHeader( + version=0, + previous_hash=parent.hash().hex(), + merkle_root="", + timestamp=timestamp, + difficulty_target=target, + nonce=0, + block_height=parent.header.block_height + 1, + ) + block = Block(header=header, transactions=[coinbase]) + block.update_header_merkle_root() + return block + + +def test_appends_valid_blocks_to_tip() -> None: + manager = _build_manager(block_reward=50) + miner = "11" * 20 + + block_1 = _coinbase_block( + manager, + parent=manager.tip_block, + miner_address=miner, + timestamp=1_739_000_030, + ) + result_1 = 
manager.add_block(block_1) + assert result_1 == "extended" + assert manager.height == 1 + + block_2 = _coinbase_block( + manager, + parent=manager.tip_block, + miner_address=miner, + timestamp=1_739_000_060, + ) + result_2 = manager.add_block(block_2) + assert result_2 == "extended" + assert manager.height == 2 + assert manager.tip_hash == block_2.hash().hex() + assert manager.state.get_account(miner).balance == 100 + + +def test_longer_fork_triggers_reorg_and_state_replay() -> None: + manager = _build_manager(block_reward=50) + miner_a = "11" * 20 + miner_b = "22" * 20 + + a1 = _coinbase_block( + manager, + parent=manager.tip_block, + miner_address=miner_a, + timestamp=1_739_000_030, + ) + manager.add_block(a1) + + a2 = _coinbase_block( + manager, + parent=a1, + miner_address=miner_a, + timestamp=1_739_000_060, + ) + assert manager.add_block(a2) == "extended" + assert manager.state.get_account(miner_a).balance == 100 + + b2 = _coinbase_block( + manager, + parent=a1, + miner_address=miner_b, + timestamp=1_739_000_061, + ) + assert manager.add_block(b2) == "stored_fork" + + b3 = _coinbase_block( + manager, + parent=b2, + miner_address=miner_b, + timestamp=1_739_000_090, + ) + assert manager.add_block(b3) == "reorg" + assert manager.tip_hash == b3.hash().hex() + assert manager.height == 3 + assert manager.state.get_account(miner_a).balance == 50 + assert manager.state.get_account(miner_b).balance == 100 + + +def test_rejects_block_with_unknown_parent() -> None: + manager = _build_manager(block_reward=50) + coinbase = create_coinbase_transaction( + miner_address="33" * 20, + amount=50, + timestamp=1_739_000_030, + ) + block = Block( + header=BlockHeader( + version=0, + previous_hash="ff" * 32, + merkle_root="", + timestamp=1_739_000_030, + difficulty_target=MAX_TARGET, + nonce=0, + block_height=1, + ), + transactions=[coinbase], + ) + block.update_header_merkle_root() + + with pytest.raises(ChainValidationError, match="Unknown parent block"): + 
manager.add_block(block) + + +def test_rejects_invalid_coinbase_amount() -> None: + manager = _build_manager(block_reward=50) + + invalid_block = _coinbase_block( + manager, + parent=manager.tip_block, + miner_address="44" * 20, + timestamp=1_739_000_030, + coinbase_amount=60, + ) + invalid_hash = invalid_block.hash().hex() + with pytest.raises(ChainValidationError, match="State transition failed"): + manager.add_block(invalid_block) + + assert manager.height == 0 + assert not manager.contains_block(invalid_hash) + + +def test_rejects_block_with_wrong_difficulty_target() -> None: + manager = _build_manager(block_reward=50) + expected = manager.expected_next_difficulty(parent_hash=manager.tip_hash) + wrong_target = expected - 1 + + invalid_block = _coinbase_block( + manager, + parent=manager.tip_block, + miner_address="55" * 20, + timestamp=1_739_000_030, + difficulty_target=wrong_target, + ) + + with pytest.raises(ChainValidationError, match="Invalid difficulty target"): + manager.add_block(invalid_block) diff --git a/tests/test_chain_sync.py b/tests/test_chain_sync.py new file mode 100644 index 0000000..42c3eaa --- /dev/null +++ b/tests/test_chain_sync.py @@ -0,0 +1,111 @@ +"""Integration tests for `/minichain/sync/1.0.0` chain synchronization.""" + +from __future__ import annotations + +import asyncio + +import pytest + +pytest.importorskip("nacl") + +from minichain.chain import ChainConfig, ChainManager, ChainValidationError +from minichain.consensus import MAX_TARGET +from minichain.genesis import GenesisConfig, create_genesis_state +from minichain.mempool import Mempool +from minichain.mining import build_candidate_block, mine_candidate_block +from minichain.network import MiniChainNetwork, NetworkConfig + + +def test_chain_sync_catches_up_shorter_peer() -> None: + async def scenario() -> None: + source_manager = _build_manager() + target_manager = _build_manager() + _mine_blocks(source_manager, count=5) + + source_node = MiniChainNetwork( + NetworkConfig( + 
host="127.0.0.1", + port=0, + node_id="node-sync-source", + enable_mdns=False, + sync_batch_size=2, + ) + ) + source_node.set_sync_handlers( + get_height=lambda: source_manager.height, + get_block_by_height=source_manager.get_canonical_block_by_height, + apply_block=lambda _block: True, + ) + await source_node.start() + + target_node = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-sync-target", + enable_mdns=False, + sync_batch_size=2, + bootstrap_peers=(source_node.listen_address(),), + ) + ) + target_node.set_sync_handlers( + get_height=lambda: target_manager.height, + get_block_by_height=target_manager.get_canonical_block_by_height, + apply_block=lambda block: _apply_block(target_manager, block), + ) + await target_node.start() + + try: + await source_node.wait_for_connected_peers(1, timeout=3.0) + await target_node.wait_for_connected_peers(1, timeout=3.0) + + await target_node.wait_for_height(source_manager.height, timeout=5.0) + assert target_manager.height == source_manager.height + assert target_manager.tip_hash == source_manager.tip_hash + finally: + await target_node.stop() + await source_node.stop() + + asyncio.run(scenario()) + + +def _build_manager() -> ChainManager: + genesis_block, genesis_state = create_genesis_state( + GenesisConfig( + initial_balances={}, + timestamp=1_739_000_000, + difficulty_target=MAX_TARGET, + ) + ) + return ChainManager( + genesis_block=genesis_block, + genesis_state=genesis_state, + config=ChainConfig( + block_reward=50, + difficulty_adjustment_interval=10, + target_block_time_seconds=30, + ), + ) + + +def _mine_blocks(manager: ChainManager, *, count: int) -> None: + for _ in range(count): + timestamp = manager.tip_block.header.timestamp + 30 + candidate = build_candidate_block( + chain_manager=manager, + mempool=Mempool(), + miner_address="aa" * 20, + max_transactions=0, + timestamp=timestamp, + ) + block, _digest = mine_candidate_block(block_template=candidate, max_nonce=0) + result = 
manager.add_block(block) + assert result == "extended" + + +def _apply_block(manager: ChainManager, block) -> bool: + try: + result = manager.add_block(block) + except ChainValidationError: + return False + return result in {"extended", "reorg", "duplicate"} diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 0000000..400cebc --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,345 @@ +"""Unit tests for CLI command parsing and end-to-end command flow.""" + +from __future__ import annotations + +import json +import os + +import pytest + +pytest.importorskip("nacl") + +from minichain.__main__ import build_parser, main +from minichain.crypto import ( + derive_address, + deserialize_signing_key, + deserialize_verify_key, + generate_key_pair, + serialize_signing_key, +) + + +def _parse_kv_lines(text: str) -> dict[str, str]: + pairs: dict[str, str] = {} + for line in text.strip().splitlines(): + if "=" in line: + key, value = line.split("=", 1) + pairs[key.strip()] = value.strip() + return pairs + + +def _extract_json_payload(text: str) -> dict[str, object]: + start = text.find("{") + end = text.rfind("}") + if start < 0 or end < 0 or end <= start: + raise ValueError("no JSON object found in output") + return json.loads(text[start : end + 1]) + + +def test_parser_defaults() -> None: + args = build_parser().parse_args([]) + assert args.host == "127.0.0.1" + assert args.port == 7000 + assert args.command is None + + +def test_namespaced_parser_supports_wallet_commands() -> None: + args = build_parser().parse_args(["wallet", "generate-key"]) + assert args.command == "wallet" + assert args.wallet_command == "generate-key" + assert args.action == "wallet_generate_key" + + +def test_parser_supports_node_run_flags() -> None: + args = build_parser().parse_args( + [ + "node", + "run", + "--peer", + "127.0.0.1:7001", + "--peer", + "127.0.0.1:7002", + "--mine", + ] + ) + assert args.action == "node_run" + assert args.mine is True + assert args.peer == 
["127.0.0.1:7001", "127.0.0.1:7002"] + + +def test_parser_supports_node_stop_flags() -> None: + args = build_parser().parse_args(["node", "stop", "--timeout-seconds", "2.5", "--force"]) + assert args.action == "node_stop" + assert args.timeout_seconds == 2.5 + assert args.force is True + + +def test_top_level_help_shows_full_command_tree(capsys: pytest.CaptureFixture[str]) -> None: + with pytest.raises(SystemExit): + main(["--help"]) + out = capsys.readouterr().out + assert "Command Tree" in out + assert "node run" in out + assert "node stop" in out + assert "wallet details" in out + assert "chain accounts" in out + + +def test_generate_key_outputs_valid_material(capsys: pytest.CaptureFixture[str]) -> None: + main(["generate-key"]) + out = capsys.readouterr().out + values = _parse_kv_lines(out) + + assert "private_key" in values + assert "public_key" in values + assert "address" in values + + signing_key = deserialize_signing_key(values["private_key"]) + verify_key = deserialize_verify_key(values["public_key"]) + assert signing_key.verify_key == verify_key + assert derive_address(verify_key) == values["address"] + + +def test_shell_mode_executes_wallet_generate_key( + monkeypatch: pytest.MonkeyPatch, + capsys: pytest.CaptureFixture[str], +) -> None: + inputs = iter(["wallet generate-key", "exit"]) + monkeypatch.setattr("builtins.input", lambda _prompt: next(inputs)) + + main(["shell"]) + out = capsys.readouterr().out + values = _parse_kv_lines(out) + assert "private_key" in values + assert "public_key" in values + assert "address" in values + + +def test_mine_chain_info_and_balance_commands( + tmp_path: pytest.TempPathFactory, + capsys: pytest.CaptureFixture[str], +) -> None: + miner_key, miner_verify = generate_key_pair() + _ = miner_key + miner_address = derive_address(miner_verify) + data_dir = tmp_path / "cli-node" + + main( + [ + "--data-dir", + str(data_dir), + "--miner-address", + miner_address, + "mine", + "--count", + "1", + ] + ) + mined_output = 
capsys.readouterr().out + assert "mined_block_1=height:1" in mined_output + + main(["--data-dir", str(data_dir), "chain", "info"]) + chain_info = _parse_kv_lines(capsys.readouterr().out) + assert chain_info["height"] == "1" + assert len(chain_info["tip_hash"]) == 64 + assert chain_info["connected_peers"] == "0" + + main(["--data-dir", str(data_dir), "wallet", "balance", "--address", miner_address]) + balance_info = _parse_kv_lines(capsys.readouterr().out) + assert balance_info["address"] == miner_address + assert balance_info["balance"] == "50" + assert balance_info["nonce"] == "0" + + main(["--data-dir", str(data_dir), "wallet", "details", "--address", miner_address]) + details = _parse_kv_lines(capsys.readouterr().out) + assert details["exists"] == "true" + + main(["--data-dir", str(data_dir), "wallet", "list"]) + listed = capsys.readouterr().out + assert "Wallet Accounts" in listed + assert miner_address in listed + + +def test_submit_tx_then_query_balances_and_block( + tmp_path: pytest.TempPathFactory, + capsys: pytest.CaptureFixture[str], +) -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + private_key_hex = serialize_signing_key(sender_key) + data_dir = tmp_path / "cli-node" + + main( + [ + "--data-dir", + str(data_dir), + "--miner-address", + sender, + "mine", + "--count", + "1", + ] + ) + _ = capsys.readouterr() + + main( + [ + "--data-dir", + str(data_dir), + "tx", + "submit", + "--private-key", + private_key_hex, + "--recipient", + recipient, + "--amount", + "10", + "--fee", + "2", + ] + ) + submit_output = _parse_kv_lines(capsys.readouterr().out) + assert "submitted_tx_id" in submit_output + assert submit_output["sender"] == sender + assert submit_output["recipient"] == recipient + assert submit_output["mined_block_height"] == "2" + + main(["--data-dir", str(data_dir), "wallet", 
"balance", "--address", sender]) + sender_balance = _parse_kv_lines(capsys.readouterr().out) + assert sender_balance["balance"] == "90" + assert sender_balance["nonce"] == "1" + + main(["--data-dir", str(data_dir), "wallet", "balance", "--address", recipient]) + recipient_balance = _parse_kv_lines(capsys.readouterr().out) + assert recipient_balance["balance"] == "10" + assert recipient_balance["nonce"] == "0" + + main(["--data-dir", str(data_dir), "chain", "block", "--height", "2"]) + block_output = capsys.readouterr().out + block_payload = _extract_json_payload(block_output) + assert block_payload["header"]["block_height"] == 2 + assert len(block_payload["transactions"]) == 2 + + +def test_chain_info_reads_connected_peers_from_daemon_runtime_status( + tmp_path: pytest.TempPathFactory, + capsys: pytest.CaptureFixture[str], +) -> None: + miner_key, miner_verify = generate_key_pair() + _ = miner_key + miner_address = derive_address(miner_verify) + data_dir = tmp_path / "daemon-status" + data_dir.mkdir(parents=True, exist_ok=True) + + main( + [ + "--data-dir", + str(data_dir), + "--miner-address", + miner_address, + "mine", + "--count", + "1", + ] + ) + _ = capsys.readouterr() + + (data_dir / "node.pid").write_text(f"{os.getpid()}\n", encoding="utf-8") + (data_dir / "node_runtime_status.json").write_text( + json.dumps({"connected_peers": 2}), + encoding="utf-8", + ) + + main(["--data-dir", str(data_dir), "chain", "info"]) + chain_info = _parse_kv_lines(capsys.readouterr().out) + assert chain_info["height"] == "1" + assert chain_info["connected_peers"] == "2" + + +def test_node_stop_removes_stale_pid_file( + tmp_path: pytest.TempPathFactory, + capsys: pytest.CaptureFixture[str], +) -> None: + data_dir = tmp_path / "daemon" + data_dir.mkdir(parents=True, exist_ok=True) + pid_file = data_dir / "node.pid" + stale_pid = 999_999 + pid_file.write_text(f"{stale_pid}\n", encoding="utf-8") + + main(["--data-dir", str(data_dir), "node", "stop"]) + output = 
_parse_kv_lines(capsys.readouterr().out) + assert output["status"] == "not_running" + assert output["reason"] == "stale_pid_file_removed" + assert output["stale_pid"] == str(stale_pid) + assert not pid_file.exists() + + +def test_submit_tx_uses_network_path_when_daemon_running( + tmp_path: pytest.TempPathFactory, + monkeypatch: pytest.MonkeyPatch, + capsys: pytest.CaptureFixture[str], +) -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + private_key_hex = serialize_signing_key(sender_key) + data_dir = tmp_path / "daemon-node" + data_dir.mkdir(parents=True, exist_ok=True) + (data_dir / "node.pid").write_text(f"{os.getpid()}\n", encoding="utf-8") + + captured: dict[str, object] = {} + + async def fake_submit_transaction_to_peer(*, transaction, host, port, timeout_seconds): + captured["sender"] = transaction.sender + captured["recipient"] = transaction.recipient + captured["nonce"] = transaction.nonce + captured["host"] = host + captured["port"] = port + captured["timeout_seconds"] = timeout_seconds + return True, "accepted" + + monkeypatch.setattr( + "minichain.__main__._submit_transaction_to_peer", + fake_submit_transaction_to_peer, + ) + monkeypatch.setattr( + "minichain.__main__._infer_sender_nonce_from_data_dir", + lambda **_kwargs: 5, + ) + + main( + [ + "--data-dir", + str(data_dir), + "--host", + "127.0.0.1", + "--port", + "7000", + "tx", + "submit", + "--private-key", + private_key_hex, + "--recipient", + recipient, + "--amount", + "3", + "--fee", + "1", + "--no-mine-now", + ] + ) + submit_output = _parse_kv_lines(capsys.readouterr().out) + assert submit_output["submitted_via"] == "network" + assert submit_output["peer"] == "127.0.0.1:7000" + assert submit_output["queued_in_mempool"] == "true" + assert submit_output["nonce"] == "5" + assert captured["sender"] == sender + assert 
captured["recipient"] == recipient + assert captured["nonce"] == 5 diff --git a/tests/test_comprehensive_integration.py b/tests/test_comprehensive_integration.py new file mode 100644 index 0000000..f219006 --- /dev/null +++ b/tests/test_comprehensive_integration.py @@ -0,0 +1,325 @@ +"""Comprehensive multi-node integration scenarios for v0.""" + +from __future__ import annotations + +import asyncio +from dataclasses import dataclass + +import pytest + +pytest.importorskip("nacl") + +from minichain.block import Block +from minichain.chain import ChainConfig, ChainManager, ChainValidationError +from minichain.consensus import MAX_TARGET +from minichain.crypto import derive_address, generate_key_pair +from minichain.genesis import GenesisConfig, create_genesis_state +from minichain.mempool import Mempool, MempoolValidationError +from minichain.mining import build_candidate_block, mine_candidate_block +from minichain.network import MiniChainNetwork, NetworkConfig, PeerAddress +from minichain.transaction import Transaction + + +@dataclass +class _IntegratedNode: + manager: ChainManager + mempool: Mempool + network: MiniChainNetwork + + +def test_three_node_network_mining_converges() -> None: + async def scenario() -> None: + node_b = _build_node(node_id="node-int-b", bootstrap_peers=()) + await node_b.network.start() + + node_a = _build_node( + node_id="node-int-a", + bootstrap_peers=(node_b.network.listen_address(),), + ) + node_c = _build_node( + node_id="node-int-c", + bootstrap_peers=(node_b.network.listen_address(),), + ) + await node_a.network.start() + await node_c.network.start() + + nodes = [node_a, node_b, node_c] + try: + await node_a.network.wait_for_connected_peers(1, timeout=3.0) + await node_b.network.wait_for_connected_peers(2, timeout=3.0) + await node_c.network.wait_for_connected_peers(1, timeout=3.0) + + miners = ["11" * 20, "22" * 20, "33" * 20] + + for expected_height, (mining_node, miner_address) in enumerate( + zip(nodes, miners), + start=1, + ): 
+ await _mine_and_broadcast(mining_node, miner_address) + await _wait_until( + lambda: all(node.manager.height >= expected_height for node in nodes), + timeout=5.0, + ) + await _wait_until( + lambda: len({node.manager.tip_hash for node in nodes}) == 1 + and all(node.manager.height == expected_height for node in nodes), + timeout=5.0, + ) + + assert all(node.manager.height == 3 for node in nodes) + assert len({node.manager.tip_hash for node in nodes}) == 1 + finally: + await node_c.network.stop() + await node_a.network.stop() + await node_b.network.stop() + + asyncio.run(scenario()) + + +def test_competing_blocks_trigger_fork_then_reorg_convergence() -> None: + async def scenario() -> None: + node_a = _build_node(node_id="node-fork-a", bootstrap_peers=()) + await node_a.network.start() + node_b = _build_node( + node_id="node-fork-b", + bootstrap_peers=(node_a.network.listen_address(),), + ) + await node_b.network.start() + + try: + await node_a.network.wait_for_connected_peers(1, timeout=3.0) + await node_b.network.wait_for_connected_peers(1, timeout=3.0) + + timestamp = node_a.manager.tip_block.header.timestamp + 30 + block_a = _build_mined_block(node_a, miner_address="44" * 20, timestamp=timestamp) + block_b = _build_mined_block(node_b, miner_address="55" * 20, timestamp=timestamp) + + await asyncio.gather( + _apply_and_broadcast_block(node_a, block_a), + _apply_and_broadcast_block(node_b, block_b), + ) + + await _wait_until( + lambda: node_a.manager.height == 1 and node_b.manager.height == 1, + timeout=3.0, + ) + assert node_a.manager.tip_hash != node_b.manager.tip_hash + + resolved = await _mine_and_broadcast(node_a, "44" * 20) + await _wait_until( + lambda: node_a.manager.height == 2 and node_b.manager.height == 2, + timeout=5.0, + ) + assert node_a.manager.tip_hash == resolved.hash().hex() + assert node_b.manager.tip_hash == resolved.hash().hex() + finally: + await node_b.network.stop() + await node_a.network.stop() + + asyncio.run(scenario()) + + +def 
test_double_spend_nonce_reuse_is_rejected_and_not_forwarded() -> None: + async def scenario() -> None: + signing_key, verify_key = generate_key_pair() + sender = derive_address(verify_key) + balances = {sender: 200} + + node_b = _build_node( + node_id="node-ds-b", + bootstrap_peers=(), + initial_balances=balances, + ) + await node_b.network.start() + node_a = _build_node( + node_id="node-ds-a", + bootstrap_peers=(node_b.network.listen_address(),), + initial_balances=balances, + ) + node_c = _build_node( + node_id="node-ds-c", + bootstrap_peers=(node_b.network.listen_address(),), + initial_balances=balances, + ) + await node_a.network.start() + await node_c.network.start() + + try: + await node_a.network.wait_for_connected_peers(1, timeout=3.0) + await node_b.network.wait_for_connected_peers(2, timeout=3.0) + await node_c.network.wait_for_connected_peers(1, timeout=3.0) + + tx_one = _signed_transaction( + signing_key=signing_key, + sender=sender, + recipient="77" * 20, + amount=25, + nonce=0, + fee=1, + timestamp=1_739_000_010, + ) + tx_two = _signed_transaction( + signing_key=signing_key, + sender=sender, + recipient="88" * 20, + amount=30, + nonce=0, + fee=1, + timestamp=1_739_000_011, + ) + + tx_one_id = tx_one.transaction_id().hex() + tx_two_id = tx_two.transaction_id().hex() + + assert await node_a.network.submit_transaction(tx_one) + await _wait_until( + lambda: node_a.mempool.contains(tx_one_id) + and node_b.mempool.contains(tx_one_id) + and node_c.mempool.contains(tx_one_id), + timeout=3.0, + ) + + assert not await node_a.network.submit_transaction(tx_two) + await asyncio.sleep(0.2) + assert not node_a.mempool.contains(tx_two_id) + assert not node_b.mempool.contains(tx_two_id) + assert not node_c.mempool.contains(tx_two_id) + finally: + await node_c.network.stop() + await node_a.network.stop() + await node_b.network.stop() + + asyncio.run(scenario()) + + +def _build_node( + *, + node_id: str, + bootstrap_peers: tuple[PeerAddress, ...], + initial_balances: 
dict[str, int] | None = None, +) -> _IntegratedNode: + manager = _build_manager(initial_balances=initial_balances or {}) + mempool = Mempool(max_size=200, max_age_seconds=3_600) + network = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=0, + node_id=node_id, + enable_mdns=False, + bootstrap_peers=bootstrap_peers, + sync_batch_size=4, + ) + ) + node = _IntegratedNode(manager=manager, mempool=mempool, network=network) + + network.set_transaction_handler(lambda transaction: _accept_transaction(node, transaction)) + network.set_block_handler(lambda block: _apply_block(node, block)) + network.set_sync_handlers( + get_height=lambda: node.manager.height, + get_block_by_height=node.manager.get_canonical_block_by_height, + apply_block=lambda block: _apply_block(node, block), + ) + return node + + +def _build_manager(*, initial_balances: dict[str, int]) -> ChainManager: + genesis_block, genesis_state = create_genesis_state( + GenesisConfig( + initial_balances=initial_balances, + timestamp=1_739_000_000, + difficulty_target=MAX_TARGET, + ) + ) + return ChainManager( + genesis_block=genesis_block, + genesis_state=genesis_state, + config=ChainConfig( + block_reward=50, + difficulty_adjustment_interval=1_000_000, + target_block_time_seconds=30, + ), + ) + + +def _build_mined_block( + node: _IntegratedNode, + *, + miner_address: str, + timestamp: int | None = None, +) -> Block: + candidate = build_candidate_block( + chain_manager=node.manager, + mempool=node.mempool, + miner_address=miner_address, + max_transactions=500, + timestamp=timestamp, + ) + block, _digest = mine_candidate_block(block_template=candidate, max_nonce=0) + return block + + +async def _mine_and_broadcast(node: _IntegratedNode, miner_address: str) -> Block: + next_timestamp = node.manager.tip_block.header.timestamp + 30 + block = _build_mined_block( + node, + miner_address=miner_address, + timestamp=next_timestamp, + ) + await _apply_and_broadcast_block(node, block) + return block + + +async def 
_apply_and_broadcast_block(node: _IntegratedNode, block: Block) -> None: + assert _apply_block(node, block) + sent = await node.network.submit_block(block) + assert sent + + +def _apply_block(node: _IntegratedNode, block: Block) -> bool: + try: + result = node.manager.add_block(block) + except ChainValidationError: + return False + + if result in {"extended", "reorg"}: + node.mempool.remove_confirmed_transactions(block.transactions, node.manager.state) + return result in {"extended", "reorg", "stored_fork", "duplicate"} + + +def _accept_transaction(node: _IntegratedNode, transaction: Transaction) -> bool: + try: + node.mempool.add_transaction(transaction, node.manager.state) + except MempoolValidationError: + return False + return True + + +def _signed_transaction( + *, + signing_key: object, + sender: str, + recipient: str, + amount: int, + nonce: int, + fee: int, + timestamp: int, +) -> Transaction: + transaction = Transaction( + sender=sender, + recipient=recipient, + amount=amount, + nonce=nonce, + fee=fee, + timestamp=timestamp, + ) + transaction.sign(signing_key) + return transaction + + +async def _wait_until(predicate, *, timeout: float) -> None: + deadline = asyncio.get_running_loop().time() + timeout + while asyncio.get_running_loop().time() < deadline: + if predicate(): + return + await asyncio.sleep(0.05) + raise TimeoutError("condition was not met before timeout") diff --git a/tests/test_consensus.py b/tests/test_consensus.py new file mode 100644 index 0000000..9e7dcfb --- /dev/null +++ b/tests/test_consensus.py @@ -0,0 +1,168 @@ +"""Unit tests for Proof-of-Work mining primitives.""" + +from __future__ import annotations + +from threading import Event + +from minichain.block import BlockHeader +from minichain.consensus import ( + MiningInterrupted, + compute_next_difficulty_target, + is_valid_pow, + mine_block_header, +) + + +def _header_template(difficulty_target: int) -> BlockHeader: + return BlockHeader( + version=0, + previous_hash="00" * 32, + 
merkle_root="11" * 32, + timestamp=1_740_000_000, + difficulty_target=difficulty_target, + nonce=0, + block_height=10, + ) + + +def _make_chain( + *, + heights: list[int], + timestamps: list[int], + difficulty_target: int, +) -> list[BlockHeader]: + if len(heights) != len(timestamps): + raise ValueError("heights and timestamps must have the same length") + return [ + BlockHeader( + version=0, + previous_hash=f"{height:064x}", + merkle_root="22" * 32, + timestamp=timestamp, + difficulty_target=difficulty_target, + nonce=0, + block_height=height, + ) + for height, timestamp in zip(heights, timestamps, strict=True) + ] + + +def test_valid_pow_is_accepted() -> None: + header = _header_template(difficulty_target=(1 << 256) - 1) + assert is_valid_pow(header) + + +def test_invalid_pow_is_rejected() -> None: + header = _header_template(difficulty_target=1) + assert not is_valid_pow(header) + + +def test_mining_finds_valid_nonce_for_reasonable_target() -> None: + header = _header_template(difficulty_target=1 << 252) + nonce, _digest = mine_block_header(header, max_nonce=500_000) + + mined_header = BlockHeader( + version=header.version, + previous_hash=header.previous_hash, + merkle_root=header.merkle_root, + timestamp=header.timestamp, + difficulty_target=header.difficulty_target, + nonce=nonce, + block_height=header.block_height, + ) + assert is_valid_pow(mined_header) + + +def test_mining_honors_stop_event() -> None: + header = _header_template(difficulty_target=1 << 240) + stop = Event() + stop.set() + + try: + mine_block_header(header, max_nonce=1_000_000, stop_event=stop) + except MiningInterrupted as exc: + assert "interrupted" in str(exc).lower() + else: + raise AssertionError("Expected mining interruption") + + +def test_mining_raises_when_nonce_range_exhausted() -> None: + header = _header_template(difficulty_target=1) + try: + mine_block_header(header, start_nonce=0, max_nonce=10) + except RuntimeError as exc: + assert "No valid nonce found" in str(exc) + else: + 
raise AssertionError("Expected RuntimeError when nonce space exhausted") + + +def test_difficulty_unchanged_when_not_on_adjustment_height() -> None: + chain = _make_chain( + heights=[0, 1, 2, 3, 5], + timestamps=[0, 10, 20, 30, 40], + difficulty_target=1_000_000, + ) + assert ( + compute_next_difficulty_target( + chain, + adjustment_interval=4, + target_block_time_seconds=10, + ) + == 1_000_000 + ) + + +def test_difficulty_target_decreases_when_blocks_are_fast() -> None: + chain = _make_chain( + heights=[0, 1, 2, 3, 4], + timestamps=[0, 5, 10, 15, 20], + difficulty_target=1_000_000, + ) + new_target = compute_next_difficulty_target( + chain, + adjustment_interval=4, + target_block_time_seconds=10, + ) + assert new_target == 500_000 + + +def test_difficulty_target_increases_when_blocks_are_slow() -> None: + chain = _make_chain( + heights=[0, 1, 2, 3, 4], + timestamps=[0, 20, 40, 60, 80], + difficulty_target=1_000_000, + ) + new_target = compute_next_difficulty_target( + chain, + adjustment_interval=4, + target_block_time_seconds=10, + ) + assert new_target == 2_000_000 + + +def test_difficulty_adjustment_is_capped_to_half_on_extreme_speed() -> None: + chain = _make_chain( + heights=[0, 1, 2, 3, 4], + timestamps=[0, 1, 2, 3, 4], + difficulty_target=1_000_000, + ) + new_target = compute_next_difficulty_target( + chain, + adjustment_interval=4, + target_block_time_seconds=10, + ) + assert new_target == 500_000 + + +def test_difficulty_adjustment_is_capped_to_double_on_extreme_delay() -> None: + chain = _make_chain( + heights=[0, 1, 2, 3, 4], + timestamps=[0, 100, 200, 300, 400], + difficulty_target=1_000_000, + ) + new_target = compute_next_difficulty_target( + chain, + adjustment_interval=4, + target_block_time_seconds=10, + ) + assert new_target == 2_000_000 diff --git a/tests/test_crypto.py b/tests/test_crypto.py new file mode 100644 index 0000000..2b40967 --- /dev/null +++ b/tests/test_crypto.py @@ -0,0 +1,63 @@ +"""Unit tests for the cryptographic identity 
module.""" + +from __future__ import annotations + +import pytest + +pytest.importorskip("nacl") + +from minichain.crypto import ( + derive_address, + deserialize_signing_key, + deserialize_verify_key, + generate_key_pair, + serialize_signing_key, + serialize_verify_key, + sign_message, + verify_signature, +) + + +def test_generated_key_pair_can_sign_and_verify() -> None: + signing_key, verify_key = generate_key_pair() + message = b"minichain-crypto-test" + + signature = sign_message(message, signing_key) + + assert verify_signature(message, signature, verify_key) + + +def test_address_derivation_is_deterministic() -> None: + signing_key, verify_key = generate_key_pair() + first = derive_address(verify_key) + second = derive_address(verify_key) + + assert first == second + assert first == derive_address(signing_key.verify_key) + assert len(first) == 40 + + +def test_invalid_signature_is_rejected() -> None: + signing_key, verify_key = generate_key_pair() + other_signing_key, _ = generate_key_pair() + message = b"minichain-message" + + wrong_signature = sign_message(message, other_signing_key) + + assert not verify_signature(message, wrong_signature, verify_key) + + +def test_key_hex_serialization_round_trip() -> None: + signing_key, verify_key = generate_key_pair() + + signing_key_hex = serialize_signing_key(signing_key) + verify_key_hex = serialize_verify_key(verify_key) + + decoded_signing_key = deserialize_signing_key(signing_key_hex) + decoded_verify_key = deserialize_verify_key(verify_key_hex) + + message = b"serialization-round-trip" + signature = sign_message(message, decoded_signing_key) + + assert verify_signature(message, signature, decoded_verify_key) + assert derive_address(decoded_verify_key) == derive_address(verify_key) diff --git a/tests/test_genesis.py b/tests/test_genesis.py new file mode 100644 index 0000000..1db85b4 --- /dev/null +++ b/tests/test_genesis.py @@ -0,0 +1,85 @@ +"""Unit tests for genesis block and state initialization.""" + +from 
__future__ import annotations + +from dataclasses import replace + +from minichain.crypto import blake2b_digest +from minichain.genesis import ( + GENESIS_PREVIOUS_HASH, + GenesisConfig, + apply_genesis_block, + create_genesis_block, + create_genesis_state, +) +from minichain.state import Account, State + + +def test_create_genesis_block_uses_conventional_fields() -> None: + config = GenesisConfig( + initial_balances={"11" * 20: 1_000_000}, + timestamp=1_739_123_456, + difficulty_target=123_456, + version=0, + ) + + block = create_genesis_block(config) + + assert block.header.block_height == 0 + assert block.header.previous_hash == GENESIS_PREVIOUS_HASH + assert block.header.timestamp == config.timestamp + assert block.header.difficulty_target == config.difficulty_target + assert block.header.nonce == 0 + assert block.header.merkle_root == blake2b_digest(b"").hex() + assert block.transactions == [] + + +def test_apply_genesis_block_initializes_expected_balances() -> None: + balances = {"aa" * 20: 500, "bb" * 20: 300} + config = GenesisConfig(initial_balances=balances) + block = create_genesis_block(config) + state = State() + + apply_genesis_block(state, block, config) + + assert state.get_account("aa" * 20).balance == 500 + assert state.get_account("aa" * 20).nonce == 0 + assert state.get_account("bb" * 20).balance == 300 + assert state.get_account("bb" * 20).nonce == 0 + + +def test_create_genesis_state_builds_block_and_state() -> None: + config = GenesisConfig(initial_balances={"cc" * 20: 42}) + + block, state = create_genesis_state(config) + + assert block.header.block_height == 0 + assert state.get_account("cc" * 20).balance == 42 + + +def test_genesis_requires_empty_state() -> None: + config = GenesisConfig(initial_balances={"dd" * 20: 1}) + block = create_genesis_block(config) + state = State() + state.set_account("ff" * 20, Account(balance=1, nonce=0)) + + try: + apply_genesis_block(state, block, config) + except ValueError as exc: + assert "empty state" in 
str(exc) + else: + raise AssertionError("Expected ValueError for non-empty state") + + +def test_genesis_block_rejects_wrong_previous_hash() -> None: + config = GenesisConfig(initial_balances={"ee" * 20: 10}) + block = create_genesis_block(config) + block.header = replace(block.header, previous_hash="11" * 32) + state = State() + + try: + apply_genesis_block(state, block, config) + except ValueError as exc: + assert "previous_hash" in str(exc) + else: + raise AssertionError("Expected ValueError for invalid previous_hash") diff --git a/tests/test_mempool.py b/tests/test_mempool.py new file mode 100644 index 0000000..8cc2d8a --- /dev/null +++ b/tests/test_mempool.py @@ -0,0 +1,313 @@ +"""Unit tests for mempool transaction queuing and selection behavior.""" + +from __future__ import annotations + +from dataclasses import replace + +import pytest + +pytest.importorskip("nacl") + +from minichain.crypto import derive_address, generate_key_pair +from minichain.mempool import Mempool, MempoolValidationError +from minichain.state import Account, State +from minichain.transaction import Transaction + + +def _signed_transaction( + *, + sender_key: object, + sender_address: str, + recipient: str, + amount: int, + nonce: int, + fee: int, + timestamp: int = 1_739_950_000, +) -> Transaction: + transaction = Transaction( + sender=sender_address, + recipient=recipient, + amount=amount, + nonce=nonce, + fee=fee, + timestamp=timestamp + nonce, + ) + transaction.sign(sender_key) + return transaction + + +def test_deduplicates_transactions_by_id() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + state = State() + state.set_account(sender, Account(balance=100, nonce=0)) + mempool = Mempool() + + tx = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=5, + 
nonce=0, + fee=1, + ) + + mempool.add_transaction(tx, state) + with pytest.raises(MempoolValidationError, match="Duplicate transaction"): + mempool.add_transaction(tx, state) + + +def test_fee_priority_respects_sender_nonce_ordering() -> None: + a_key, a_verify = generate_key_pair() + b_key, b_verify = generate_key_pair() + c_key, c_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender_a = derive_address(a_verify) + sender_b = derive_address(b_verify) + sender_c = derive_address(c_verify) + recipient = derive_address(recipient_verify) + + state = State() + state.set_account(sender_a, Account(balance=100, nonce=0)) + state.set_account(sender_b, Account(balance=100, nonce=0)) + state.set_account(sender_c, Account(balance=100, nonce=0)) + mempool = Mempool() + + tx_a0 = _signed_transaction( + sender_key=a_key, + sender_address=sender_a, + recipient=recipient, + amount=5, + nonce=0, + fee=1, + ) + tx_a1 = _signed_transaction( + sender_key=a_key, + sender_address=sender_a, + recipient=recipient, + amount=5, + nonce=1, + fee=10, + ) + tx_b0 = _signed_transaction( + sender_key=b_key, + sender_address=sender_b, + recipient=recipient, + amount=5, + nonce=0, + fee=8, + ) + tx_c0 = _signed_transaction( + sender_key=c_key, + sender_address=sender_c, + recipient=recipient, + amount=5, + nonce=0, + fee=4, + ) + + mempool.add_transaction(tx_a1, state) + mempool.add_transaction(tx_b0, state) + mempool.add_transaction(tx_a0, state) + mempool.add_transaction(tx_c0, state) + + selected = mempool.get_transactions_for_mining(state, limit=4) + + assert [tx.fee for tx in selected] == [8, 4, 1, 10] + assert selected[2].sender == sender_a and selected[2].nonce == 0 + assert selected[3].sender == sender_a and selected[3].nonce == 1 + + +def test_evicts_low_fee_when_pool_exceeds_max_size() -> None: + s1_key, s1_verify = generate_key_pair() + s2_key, s2_verify = generate_key_pair() + s3_key, s3_verify = generate_key_pair() + 
recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender1 = derive_address(s1_verify) + sender2 = derive_address(s2_verify) + sender3 = derive_address(s3_verify) + recipient = derive_address(recipient_verify) + + state = State() + state.set_account(sender1, Account(balance=100, nonce=0)) + state.set_account(sender2, Account(balance=100, nonce=0)) + state.set_account(sender3, Account(balance=100, nonce=0)) + mempool = Mempool(max_size=2, max_age_seconds=10_000) + + tx1 = _signed_transaction( + sender_key=s1_key, + sender_address=sender1, + recipient=recipient, + amount=5, + nonce=0, + fee=1, + ) + tx2 = _signed_transaction( + sender_key=s2_key, + sender_address=sender2, + recipient=recipient, + amount=5, + nonce=0, + fee=6, + ) + tx3 = _signed_transaction( + sender_key=s3_key, + sender_address=sender3, + recipient=recipient, + amount=5, + nonce=0, + fee=3, + ) + + id1 = mempool.add_transaction(tx1, state, received_at=1) + id2 = mempool.add_transaction(tx2, state, received_at=2) + id3 = mempool.add_transaction(tx3, state, received_at=3) + + assert mempool.size() == 2 + assert not mempool.contains(id1) + assert mempool.contains(id2) + assert mempool.contains(id3) + + +def test_nonce_gap_is_held_then_promoted_when_gap_filled() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + + state = State() + state.set_account(sender, Account(balance=100, nonce=0)) + mempool = Mempool() + + tx_nonce_1 = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=5, + nonce=1, + fee=5, + ) + tx_nonce_0 = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=5, + nonce=0, + fee=1, + ) + + mempool.add_transaction(tx_nonce_1, state) + assert mempool.ready_count() == 0 + assert 
mempool.waiting_count() == 1 + + mempool.add_transaction(tx_nonce_0, state) + assert mempool.ready_count() == 2 + assert mempool.waiting_count() == 0 + + selected = mempool.get_transactions_for_mining(state, limit=2) + assert [tx.nonce for tx in selected] == [0, 1] + + +def test_confirmed_transaction_removal_revalidates_pending_set() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + state = State() + state.set_account(sender, Account(balance=100, nonce=0)) + mempool = Mempool() + + tx0 = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=10, + nonce=0, + fee=2, + ) + tx1 = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=10, + nonce=1, + fee=1, + ) + + mempool.add_transaction(tx0, state) + mempool.add_transaction(tx1, state) + assert mempool.size() == 2 + assert mempool.ready_count() == 2 + + state.apply_transaction(tx0) + mempool.remove_confirmed_transactions([tx0], state) + + assert mempool.size() == 1 + assert mempool.ready_count() == 1 + selected = mempool.get_transactions_for_mining(state, limit=1) + assert selected[0].nonce == 1 + + +def test_rejects_duplicate_sender_nonce_even_if_tx_id_differs() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + state = State() + state.set_account(sender, Account(balance=100, nonce=0)) + mempool = Mempool() + + tx = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=5, + nonce=0, + fee=1, + ) + tx_modified = replace(tx, amount=6) + tx_modified.sign(sender_key) + + mempool.add_transaction(tx, state) + with 
pytest.raises(MempoolValidationError, match="Duplicate sender nonce"): + mempool.add_transaction(tx_modified, state) + + +def test_evicts_stale_transactions_by_age() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + state = State() + state.set_account(sender, Account(balance=100, nonce=0)) + mempool = Mempool(max_size=10, max_age_seconds=10) + + tx = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=5, + nonce=0, + fee=1, + ) + tx_id = mempool.add_transaction(tx, state, received_at=100) + + evicted = mempool.evict(state, current_time=111) + assert tx_id in evicted + assert mempool.size() == 0 diff --git a/tests/test_merkle.py b/tests/test_merkle.py new file mode 100644 index 0000000..a271f75 --- /dev/null +++ b/tests/test_merkle.py @@ -0,0 +1,37 @@ +"""Unit tests for Merkle tree construction.""" + +from __future__ import annotations + +import pytest + +pytest.importorskip("nacl") + +from minichain.crypto import blake2b_digest +from minichain.merkle import compute_merkle_root + + +def test_empty_leaf_list_has_well_defined_root() -> None: + assert compute_merkle_root([]) == blake2b_digest(b"") + + +def test_merkle_root_is_deterministic() -> None: + leaves = [blake2b_digest(b"tx-a"), blake2b_digest(b"tx-b"), blake2b_digest(b"tx-c")] + first = compute_merkle_root(leaves) + second = compute_merkle_root(list(leaves)) + assert first == second + + +def test_merkle_root_changes_when_leaf_changes() -> None: + base = [blake2b_digest(b"tx-a"), blake2b_digest(b"tx-b"), blake2b_digest(b"tx-c")] + modified = [blake2b_digest(b"tx-a"), blake2b_digest(b"tx-b-mutated"), blake2b_digest(b"tx-c")] + assert compute_merkle_root(base) != compute_merkle_root(modified) + + +def test_odd_leaf_count_duplicates_last_leaf() -> None: + leaves = [blake2b_digest(b"tx-a"), 
blake2b_digest(b"tx-b"), blake2b_digest(b"tx-c")] + + left = blake2b_digest(leaves[0] + leaves[1]) + right = blake2b_digest(leaves[2] + leaves[2]) + expected = blake2b_digest(left + right) + + assert compute_merkle_root(leaves) == expected diff --git a/tests/test_mining.py b/tests/test_mining.py new file mode 100644 index 0000000..dde750b --- /dev/null +++ b/tests/test_mining.py @@ -0,0 +1,223 @@ +"""Unit tests for candidate block construction and mining flow.""" + +from __future__ import annotations + +import pytest + +pytest.importorskip("nacl") + +from minichain.chain import ChainConfig, ChainManager +from minichain.consensus import MAX_TARGET, is_valid_pow +from minichain.crypto import derive_address, generate_key_pair +from minichain.genesis import GenesisConfig, create_genesis_state +from minichain.mempool import Mempool +from minichain.mining import build_candidate_block, mine_candidate_block +from minichain.transaction import Transaction + + +def _build_manager( + *, + initial_balances: dict[str, int], + genesis_target: int = MAX_TARGET, +) -> ChainManager: + genesis_block, genesis_state = create_genesis_state( + GenesisConfig( + initial_balances=initial_balances, + timestamp=1_739_000_000, + difficulty_target=genesis_target, + ) + ) + return ChainManager( + genesis_block=genesis_block, + genesis_state=genesis_state, + config=ChainConfig( + block_reward=50, + difficulty_adjustment_interval=10, + target_block_time_seconds=30, + ), + ) + + +def _signed_transaction( + *, + sender_key: object, + sender_address: str, + recipient: str, + amount: int, + nonce: int, + fee: int, + timestamp: int, +) -> Transaction: + tx = Transaction( + sender=sender_address, + recipient=recipient, + amount=amount, + nonce=nonce, + fee=fee, + timestamp=timestamp, + ) + tx.sign(sender_key) + return tx + + +def test_candidate_block_selects_by_fee_with_sender_nonce_ordering() -> None: + a_key, a_verify = generate_key_pair() + b_key, b_verify = generate_key_pair() + recipient_key, 
recipient_verify = generate_key_pair() + _ = recipient_key + + sender_a = derive_address(a_verify) + sender_b = derive_address(b_verify) + recipient = derive_address(recipient_verify) + + manager = _build_manager(initial_balances={sender_a: 100, sender_b: 100}) + mempool = Mempool() + + tx_a1 = _signed_transaction( + sender_key=a_key, + sender_address=sender_a, + recipient=recipient, + amount=10, + nonce=1, + fee=10, + timestamp=1_739_000_010, + ) + tx_b0 = _signed_transaction( + sender_key=b_key, + sender_address=sender_b, + recipient=recipient, + amount=10, + nonce=0, + fee=8, + timestamp=1_739_000_011, + ) + tx_a0 = _signed_transaction( + sender_key=a_key, + sender_address=sender_a, + recipient=recipient, + amount=10, + nonce=0, + fee=1, + timestamp=1_739_000_012, + ) + + mempool.add_transaction(tx_a1, manager.state) + mempool.add_transaction(tx_b0, manager.state) + mempool.add_transaction(tx_a0, manager.state) + + candidate = build_candidate_block( + chain_manager=manager, + mempool=mempool, + miner_address="11" * 20, + max_transactions=3, + timestamp=1_739_000_030, + ) + + assert candidate.header.previous_hash == manager.tip_hash + assert candidate.header.block_height == manager.height + 1 + assert candidate.header.difficulty_target == manager.expected_next_difficulty() + assert candidate.transactions[0].is_coinbase() + assert [tx.fee for tx in candidate.transactions[1:]] == [8, 1, 10] + assert [tx.nonce for tx in candidate.transactions[1:] if tx.sender == sender_a] == [0, 1] + + total_fees = sum(tx.fee for tx in candidate.transactions[1:]) + assert candidate.transactions[0].amount == manager.config.block_reward + total_fees + assert candidate.has_valid_merkle_root() + + +def test_candidate_block_respects_max_transaction_limit() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + manager 
= _build_manager(initial_balances={sender: 200}) + mempool = Mempool() + + tx0 = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=5, + nonce=0, + fee=1, + timestamp=1_739_000_010, + ) + tx1 = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=5, + nonce=1, + fee=2, + timestamp=1_739_000_011, + ) + mempool.add_transaction(tx0, manager.state) + mempool.add_transaction(tx1, manager.state) + + candidate = build_candidate_block( + chain_manager=manager, + mempool=mempool, + miner_address="22" * 20, + max_transactions=1, + timestamp=1_739_000_030, + ) + + assert len(candidate.transactions) == 2 + assert candidate.transactions[1].nonce == 0 + assert candidate.transactions[0].amount == manager.config.block_reward + tx0.fee + + +def test_mined_candidate_block_is_accepted_by_chain_manager() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + manager = _build_manager(initial_balances={sender: 100}, genesis_target=1 << 252) + mempool = Mempool() + + tx = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=10, + nonce=0, + fee=2, + timestamp=1_739_000_010, + ) + mempool.add_transaction(tx, manager.state) + + candidate = build_candidate_block( + chain_manager=manager, + mempool=mempool, + miner_address="33" * 20, + max_transactions=10, + timestamp=1_739_000_030, + ) + mined_block, _digest = mine_candidate_block( + block_template=candidate, + max_nonce=500_000, + ) + + assert is_valid_pow(mined_block.header) + result = manager.add_block(mined_block) + assert result == "extended" + assert manager.height == 1 + assert manager.state.get_account("33" * 20).balance == manager.config.block_reward + tx.fee + + +def test_candidate_block_timestamp_is_monotonic() 
-> None: + manager = _build_manager(initial_balances={}) + mempool = Mempool() + candidate = build_candidate_block( + chain_manager=manager, + mempool=mempool, + miner_address="44" * 20, + max_transactions=0, + timestamp=manager.tip_block.header.timestamp - 10, + ) + + assert candidate.header.timestamp == manager.tip_block.header.timestamp + 1 diff --git a/tests/test_network.py b/tests/test_network.py new file mode 100644 index 0000000..f3ebd28 --- /dev/null +++ b/tests/test_network.py @@ -0,0 +1,142 @@ +"""Integration tests for peer networking and discovery.""" + +from __future__ import annotations + +import asyncio +import socket + +from minichain.network import MiniChainNetwork, NetworkConfig, PeerAddress + + +def test_bootstrap_peer_discovery() -> None: + async def scenario() -> None: + node_a = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-a", + enable_mdns=False, + ) + ) + await node_a.start() + + node_b = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-b", + enable_mdns=False, + bootstrap_peers=(node_a.listen_address(),), + ) + ) + await node_b.start() + + try: + await node_a.wait_for_connected_peers(1, timeout=3.0) + await node_b.wait_for_connected_peers(1, timeout=3.0) + + assert node_a.is_connected_to("node-b") + assert node_b.is_connected_to("node-a") + finally: + await node_b.stop() + await node_a.stop() + + asyncio.run(scenario()) + + +def test_mdns_discovery_connects_two_nodes() -> None: + async def scenario() -> None: + mdns_port = _pick_free_udp_port() + config_a = NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-mdns-a", + enable_mdns=True, + mdns_group="224.1.1.199", + mdns_port=mdns_port, + mdns_interval_seconds=0.2, + ) + config_b = NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-mdns-b", + enable_mdns=True, + mdns_group="224.1.1.199", + mdns_port=mdns_port, + mdns_interval_seconds=0.2, + ) + node_a = MiniChainNetwork(config_a) + node_b = 
MiniChainNetwork(config_b) + await node_a.start() + await node_b.start() + + try: + await node_a.wait_for_connected_peers(1, timeout=5.0) + await node_b.wait_for_connected_peers(1, timeout=5.0) + + assert node_a.is_connected_to("node-mdns-b") + assert node_b.is_connected_to("node-mdns-a") + finally: + await node_b.stop() + await node_a.stop() + + asyncio.run(scenario()) + + +def test_bootstrap_reconnects_when_peer_starts_late() -> None: + async def scenario() -> None: + listen_port = _pick_free_tcp_port() + node_b = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-reconnect-b", + enable_mdns=False, + bootstrap_peers=(PeerAddress(host="127.0.0.1", port=listen_port),), + reconnect_interval_seconds=0.1, + connect_timeout_seconds=0.1, + ) + ) + await node_b.start() + await asyncio.sleep(0.2) + + node_a = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=listen_port, + node_id="node-reconnect-a", + enable_mdns=False, + reconnect_interval_seconds=0.1, + connect_timeout_seconds=0.1, + ) + ) + await node_a.start() + + try: + await node_a.wait_for_connected_peers(1, timeout=3.0) + await node_b.wait_for_connected_peers(1, timeout=3.0) + assert node_a.is_connected_to("node-reconnect-b") + assert node_b.is_connected_to("node-reconnect-a") + finally: + await node_a.stop() + await node_b.stop() + + asyncio.run(scenario()) + + +def _pick_free_udp_port() -> int: + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + try: + sock.bind(("127.0.0.1", 0)) + return int(sock.getsockname()[1]) + finally: + sock.close() + + +def _pick_free_tcp_port() -> int: + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + try: + sock.bind(("127.0.0.1", 0)) + return int(sock.getsockname()[1]) + finally: + sock.close() diff --git a/tests/test_node.py b/tests/test_node.py new file mode 100644 index 0000000..b46998f --- /dev/null +++ b/tests/test_node.py @@ -0,0 +1,164 @@ +"""Integration-style tests for node orchestration and persistence.""" + +from 
__future__ import annotations + +import pytest + +pytest.importorskip("nacl") + +from minichain.chain import ChainConfig +from minichain.crypto import derive_address, generate_key_pair +from minichain.genesis import GenesisConfig +from minichain.mining import build_candidate_block, mine_candidate_block +from minichain.node import MiniChainNode, NodeConfig +from minichain.storage import SQLiteStorage +from minichain.transaction import Transaction + + +def _signed_transaction( + *, + sender_key: object, + sender_address: str, + recipient: str, + amount: int, + nonce: int, + fee: int, + timestamp: int, +) -> Transaction: + tx = Transaction( + sender=sender_address, + recipient=recipient, + amount=amount, + nonce=nonce, + fee=fee, + timestamp=timestamp, + ) + tx.sign(sender_key) + return tx + + +def test_node_start_initializes_and_persists_genesis( + tmp_path: pytest.TempPathFactory, +) -> None: + config = NodeConfig( + data_dir=tmp_path / "node-data", + genesis_config=GenesisConfig( + initial_balances={"aa" * 20: 123}, + timestamp=1_739_000_000, + difficulty_target=(1 << 255) - 1, + ), + chain_config=ChainConfig(block_reward=50), + ) + node = MiniChainNode(config) + node.start() + try: + assert node.running + assert node.height == 0 + assert node.chain_manager.state.get_account("aa" * 20).balance == 123 + metadata = node.storage.load_chain_metadata() + assert metadata is not None + assert metadata["height"] == 0 + assert metadata["head_hash"] == node.tip_hash + finally: + node.stop() + + with SQLiteStorage((tmp_path / "node-data") / "chain.sqlite3") as storage: + assert storage.get_block_by_height(0) is not None + persisted_meta = storage.load_chain_metadata() + assert persisted_meta is not None + assert persisted_meta["height"] == 0 + + +def test_node_mine_block_then_reload_from_disk( + tmp_path: pytest.TempPathFactory, +) -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender 
= derive_address(sender_verify) + recipient = derive_address(recipient_verify) + miner = "11" * 20 + + config = NodeConfig( + data_dir=tmp_path / "node-data", + miner_address=miner, + genesis_config=GenesisConfig( + initial_balances={sender: 200}, + timestamp=1_739_000_000, + difficulty_target=(1 << 255) - 1, + ), + chain_config=ChainConfig(block_reward=50), + ) + + node = MiniChainNode(config) + node.start() + try: + tx = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=25, + nonce=0, + fee=3, + timestamp=1_739_000_010, + ) + node.submit_transaction(tx) + node.mine_one_block(max_nonce=500_000, timestamp=1_739_000_030) + + assert node.height == 1 + assert node.chain_manager.state.get_account(sender).balance == 172 + assert node.chain_manager.state.get_account(recipient).balance == 25 + assert node.chain_manager.state.get_account(miner).balance == 53 + finally: + node.stop() + + restarted = MiniChainNode(config) + restarted.start() + try: + assert restarted.height == 1 + assert restarted.chain_manager.state.get_account(sender).balance == 172 + assert restarted.chain_manager.state.get_account(recipient).balance == 25 + assert restarted.chain_manager.state.get_account(miner).balance == 53 + finally: + restarted.stop() + + +def test_accept_block_persists_chain_head( + tmp_path: pytest.TempPathFactory, +) -> None: + miner = "22" * 20 + config = NodeConfig( + data_dir=tmp_path / "node-data", + miner_address=miner, + genesis_config=GenesisConfig( + initial_balances={}, + timestamp=1_739_000_000, + difficulty_target=(1 << 255) - 1, + ), + chain_config=ChainConfig(block_reward=50), + ) + node = MiniChainNode(config) + node.start() + try: + candidate = build_candidate_block( + chain_manager=node.chain_manager, + mempool=node.mempool, + miner_address=miner, + max_transactions=0, + timestamp=1_739_000_030, + ) + mined_block, _digest = mine_candidate_block(block_template=candidate, max_nonce=500_000) + result = 
node.accept_block(mined_block) + assert result == "extended" + assert node.height == 1 + finally: + node.stop() + + reopened = MiniChainNode(config) + reopened.start() + try: + assert reopened.height == 1 + assert reopened.chain_manager.state.get_account(miner).balance == 50 + finally: + reopened.stop() diff --git a/tests/test_scaffold.py b/tests/test_scaffold.py new file mode 100644 index 0000000..cc0eb7c --- /dev/null +++ b/tests/test_scaffold.py @@ -0,0 +1,36 @@ +"""Scaffolding checks for Issue #1.""" + +from __future__ import annotations + +import importlib + +COMPONENT_MODULES = [ + "crypto", + "transaction", + "block", + "state", + "mempool", + "consensus", + "network", + "storage", + "node", + "serialization", + "merkle", + "genesis", + "chain", + "mining", +] + + +def test_component_modules_are_importable() -> None: + for module in COMPONENT_MODULES: + imported = importlib.import_module(f"minichain.{module}") + assert imported is not None + + +def test_cli_parser_defaults() -> None: + from minichain.__main__ import build_parser + + args = build_parser().parse_args([]) + assert args.host == "127.0.0.1" + assert args.port == 7000 diff --git a/tests/test_serialization.py b/tests/test_serialization.py new file mode 100644 index 0000000..4741fde --- /dev/null +++ b/tests/test_serialization.py @@ -0,0 +1,102 @@ +"""Tests for deterministic serialization.""" + +from __future__ import annotations + +from collections.abc import Callable + +import pytest + +from minichain.serialization import serialize_block_header, serialize_transaction + + +def test_transaction_serialization_is_deterministic() -> None: + tx_a = { + "sender": "a1" * 20, + "recipient": "b2" * 20, + "amount": 25, + "nonce": 1, + "fee": 2, + "timestamp": 1_739_749_000, + } + tx_b = { + "timestamp": 1_739_749_000, + "fee": 2, + "nonce": 1, + "amount": 25, + "recipient": "b2" * 20, + "sender": "a1" * 20, + } + + serialized_a = serialize_transaction(tx_a) + serialized_b = serialize_transaction(tx_b) + + 
assert serialized_a == serialized_b + assert b" " not in serialized_a + + +def test_changing_transaction_field_changes_serialization() -> None: + base = { + "sender": "aa" * 20, + "recipient": "bb" * 20, + "amount": 10, + "nonce": 0, + "fee": 1, + "timestamp": 123456, + } + mutated = dict(base) + mutated["amount"] = 11 + + assert serialize_transaction(base) != serialize_transaction(mutated) + + +def test_changing_block_header_field_changes_serialization() -> None: + base = { + "version": 0, + "previous_hash": "00" * 32, + "merkle_root": "11" * 32, + "timestamp": 123_456_789, + "difficulty_target": 1_000_000, + "nonce": 7, + "block_height": 3, + } + mutated = dict(base) + mutated["nonce"] = 8 + + assert serialize_block_header(base) != serialize_block_header(mutated) + + +@pytest.mark.parametrize( + "payload,serializer,expected", + [ + ( + { + "sender": "aa" * 20, + "recipient": "bb" * 20, + "amount": 1, + "nonce": 1, + "timestamp": 1, + }, + serialize_transaction, + "Missing required fields: fee", + ), + ( + { + "version": 0, + "previous_hash": "00" * 32, + "merkle_root": "11" * 32, + "timestamp": 1, + "difficulty_target": 1, + "nonce": 1, + "block_height": 1, + "extra": "x", + }, + serialize_block_header, + "Unexpected fields: extra", + ), + ], +) +def test_required_and_unexpected_fields_are_rejected( + payload: dict[str, object], serializer: Callable[[dict[str, object]], bytes], expected: str +) -> None: + with pytest.raises(ValueError, match=expected): + serializer(payload) diff --git a/tests/test_state.py b/tests/test_state.py new file mode 100644 index 0000000..256ca84 --- /dev/null +++ b/tests/test_state.py @@ -0,0 +1,268 @@ +"""Unit tests for account state transitions.""" + +from __future__ import annotations + +import pytest + +pytest.importorskip("nacl") + +from minichain.block import Block, BlockHeader +from minichain.crypto import derive_address, generate_key_pair +from minichain.state import Account, State, StateTransitionError +from 
minichain.transaction import Transaction, create_coinbase_transaction + + +def _signed_transaction( + sender_key: object, + sender_address: str, + recipient: str, + amount: int, + nonce: int, + fee: int = 1, + timestamp: int = 1_739_900_000, +) -> Transaction: + tx = Transaction( + sender=sender_address, + recipient=recipient, + amount=amount, + nonce=nonce, + fee=fee, + timestamp=timestamp + nonce, + ) + tx.sign(sender_key) + return tx + + +def _block_with_transactions( + *, + miner_address: str, + transactions: list[Transaction], + block_reward: int, +) -> Block: + coinbase = create_coinbase_transaction( + miner_address=miner_address, + amount=block_reward + sum(tx.fee for tx in transactions), + timestamp=1_739_900_100, + ) + header = BlockHeader( + version=0, + previous_hash="00" * 32, + merkle_root="", + timestamp=1_739_900_100, + difficulty_target=1_000_000, + nonce=0, + block_height=1, + ) + block = Block(header=header, transactions=[coinbase, *transactions]) + block.update_header_merkle_root() + return block + + +def test_successful_transfer_updates_balances_and_nonce() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender_address = derive_address(sender_verify) + recipient_address = derive_address(recipient_verify) + + state = State() + state.set_account(sender_address, Account(balance=100, nonce=0)) + + tx = _signed_transaction( + sender_key, sender_address, recipient_address, amount=25, nonce=0, fee=2 + ) + state.apply_transaction(tx) + + assert state.get_account(sender_address).balance == 73 + assert state.get_account(sender_address).nonce == 1 + assert state.get_account(recipient_address).balance == 25 + assert state.get_account(recipient_address).nonce == 0 + + +def test_insufficient_balance_is_rejected() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender_address = 
derive_address(sender_verify) + recipient_address = derive_address(recipient_verify) + + state = State() + state.set_account(sender_address, Account(balance=5, nonce=0)) + + tx = _signed_transaction( + sender_key, sender_address, recipient_address, amount=10, nonce=0, fee=1 + ) + + with pytest.raises(StateTransitionError, match="Insufficient balance"): + state.apply_transaction(tx) + + +def test_nonce_mismatch_is_rejected() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender_address = derive_address(sender_verify) + recipient_address = derive_address(recipient_verify) + + state = State() + state.set_account(sender_address, Account(balance=100, nonce=1)) + + tx = _signed_transaction( + sender_key, sender_address, recipient_address, amount=10, nonce=0, fee=1 + ) + + with pytest.raises(StateTransitionError, match="Nonce mismatch"): + state.apply_transaction(tx) + + +def test_transfer_to_new_address_creates_recipient_account() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender_address = derive_address(sender_verify) + recipient_address = derive_address(recipient_verify) + + state = State() + state.set_account(sender_address, Account(balance=50, nonce=0)) + assert recipient_address not in state.accounts + + tx = _signed_transaction( + sender_key, sender_address, recipient_address, amount=10, nonce=0, fee=1 + ) + state.apply_transaction(tx) + + assert recipient_address in state.accounts + assert state.get_account(recipient_address).balance == 10 + + +def test_apply_block_is_atomic_and_rolls_back_on_failure() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + miner_key, miner_verify = generate_key_pair() + _ = recipient_key + _ = miner_key + + sender_address = derive_address(sender_verify) + recipient_address = 
derive_address(recipient_verify) + miner_address = derive_address(miner_verify) + + state = State() + state.set_account(sender_address, Account(balance=100, nonce=0)) + + tx_ok = _signed_transaction( + sender_key, sender_address, recipient_address, amount=10, nonce=0, fee=1 + ) + tx_fail = _signed_transaction( + sender_key, sender_address, recipient_address, amount=95, nonce=1, fee=10 + ) + block_reward = 50 + block = _block_with_transactions( + miner_address=miner_address, + transactions=[tx_ok, tx_fail], + block_reward=block_reward, + ) + + with pytest.raises(StateTransitionError, match="Block application failed"): + state.apply_block(block, block_reward=block_reward) + + assert state.get_account(sender_address).balance == 100 + assert state.get_account(sender_address).nonce == 0 + assert state.get_account(recipient_address).balance == 0 + assert state.get_account(recipient_address).nonce == 0 + assert miner_address not in state.accounts + + +def test_apply_block_with_valid_coinbase_pays_reward_and_fees() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + miner_key, miner_verify = generate_key_pair() + _ = recipient_key + _ = miner_key + + sender_address = derive_address(sender_verify) + recipient_address = derive_address(recipient_verify) + miner_address = derive_address(miner_verify) + + state = State() + state.set_account(sender_address, Account(balance=100, nonce=0)) + + tx = _signed_transaction( + sender_key, sender_address, recipient_address, amount=25, nonce=0, fee=3 + ) + block_reward = 50 + block = _block_with_transactions( + miner_address=miner_address, + transactions=[tx], + block_reward=block_reward, + ) + + state.apply_block(block, block_reward=block_reward) + + assert state.get_account(sender_address).balance == 72 + assert state.get_account(sender_address).nonce == 1 + assert state.get_account(recipient_address).balance == 25 + assert state.get_account(miner_address).balance == 53 + 
+ +def test_block_with_incorrect_coinbase_amount_is_rejected() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + miner_key, miner_verify = generate_key_pair() + _ = recipient_key + _ = miner_key + + sender_address = derive_address(sender_verify) + recipient_address = derive_address(recipient_verify) + miner_address = derive_address(miner_verify) + + state = State() + state.set_account(sender_address, Account(balance=100, nonce=0)) + + tx = _signed_transaction( + sender_key, sender_address, recipient_address, amount=10, nonce=0, fee=2 + ) + block_reward = 50 + block = _block_with_transactions( + miner_address=miner_address, + transactions=[tx], + block_reward=block_reward, + ) + block.transactions[0].amount += 1 + block.update_header_merkle_root() + + with pytest.raises(StateTransitionError, match="Invalid coinbase amount"): + state.apply_block(block, block_reward=block_reward) + + +def test_block_without_coinbase_is_rejected() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender_address = derive_address(sender_verify) + recipient_address = derive_address(recipient_verify) + + state = State() + state.set_account(sender_address, Account(balance=100, nonce=0)) + + tx = _signed_transaction( + sender_key, sender_address, recipient_address, amount=10, nonce=0, fee=1 + ) + header = BlockHeader( + version=0, + previous_hash="00" * 32, + merkle_root="", + timestamp=1_739_900_100, + difficulty_target=1_000_000, + nonce=0, + block_height=1, + ) + block = Block(header=header, transactions=[tx]) + block.update_header_merkle_root() + + with pytest.raises(StateTransitionError, match="coinbase"): + state.apply_block(block, block_reward=50) diff --git a/tests/test_storage.py b/tests/test_storage.py new file mode 100644 index 0000000..22df41b --- /dev/null +++ b/tests/test_storage.py @@ -0,0 +1,180 @@ +"""Unit tests for 
SQLite persistence and transactional storage behavior.""" + +from __future__ import annotations + +import pytest + +pytest.importorskip("nacl") + +from minichain.block import Block, BlockHeader +from minichain.crypto import derive_address, generate_key_pair +from minichain.state import Account, State +from minichain.storage import SQLiteStorage, StorageError +from minichain.transaction import Transaction, create_coinbase_transaction + + +def _signed_transaction( + *, + sender_key: object, + sender_address: str, + recipient: str, + amount: int, + nonce: int, + fee: int, + timestamp: int, +) -> Transaction: + tx = Transaction( + sender=sender_address, + recipient=recipient, + amount=amount, + nonce=nonce, + fee=fee, + timestamp=timestamp, + ) + tx.sign(sender_key) + return tx + + +def _block_with_transactions( + *, + previous_hash: str, + height: int, + timestamp: int, + transactions: list[Transaction], +) -> Block: + header = BlockHeader( + version=0, + previous_hash=previous_hash, + merkle_root="", + timestamp=timestamp, + difficulty_target=(1 << 255), + nonce=0, + block_height=height, + ) + block = Block(header=header, transactions=transactions) + block.update_header_merkle_root() + return block + + +def test_store_and_load_block_round_trip(tmp_path: pytest.TempPathFactory) -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + miner_key, miner_verify = generate_key_pair() + _ = recipient_key + _ = miner_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + miner = derive_address(miner_verify) + + tx = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=25, + nonce=0, + fee=2, + timestamp=1_739_990_001, + ) + coinbase = create_coinbase_transaction( + miner_address=miner, + amount=52, + timestamp=1_739_990_001, + ) + block = _block_with_transactions( + previous_hash="00" * 32, + height=1, + 
timestamp=1_739_990_001, + transactions=[coinbase, tx], + ) + + db_path = tmp_path / "chain.db" + with SQLiteStorage(db_path) as storage: + storage.store_block(block) + loaded_by_hash = storage.get_block_by_hash(block.hash().hex()) + loaded_by_height = storage.get_block_by_height(block.header.block_height) + + assert loaded_by_hash is not None + assert loaded_by_hash.hash() == block.hash() + assert loaded_by_hash.header.previous_hash == block.header.previous_hash + assert len(loaded_by_hash.transactions) == 2 + assert loaded_by_hash.transactions[0].is_coinbase() + assert loaded_by_hash.transactions[1].signature == tx.signature + assert loaded_by_height is not None + assert loaded_by_height.hash() == block.hash() + + +def test_state_and_metadata_persist_across_restart( + tmp_path: pytest.TempPathFactory, +) -> None: + db_path = tmp_path / "chain.db" + state = State() + state.set_account("11" * 20, Account(balance=100, nonce=2)) + state.set_account("22" * 20, Account(balance=50, nonce=0)) + head_hash = "ab" * 32 + + storage = SQLiteStorage(db_path) + storage.save_state(state) + storage.save_chain_metadata(height=7, head_hash=head_hash) + storage.close() + + reopened = SQLiteStorage(db_path) + loaded_state = reopened.load_state() + metadata = reopened.load_chain_metadata() + reopened.close() + + assert loaded_state.get_account("11" * 20).balance == 100 + assert loaded_state.get_account("11" * 20).nonce == 2 + assert loaded_state.get_account("22" * 20).balance == 50 + assert metadata == {"height": 7, "head_hash": head_hash} + + +def test_atomic_persist_rolls_back_on_metadata_failure( + tmp_path: pytest.TempPathFactory, +) -> None: + db_path = tmp_path / "chain.db" + with SQLiteStorage(db_path) as storage: + base_state = State() + base_state.set_account("aa" * 20, Account(balance=10, nonce=0)) + block_1 = _block_with_transactions( + previous_hash="00" * 32, + height=1, + timestamp=1_739_990_100, + transactions=[ + create_coinbase_transaction( + miner_address="bb" * 20, 
+ amount=50, + timestamp=1_739_990_100, + ) + ], + ) + storage.persist_block_state_and_metadata(block=block_1, state=base_state) + + failing_state = State() + failing_state.set_account("cc" * 20, Account(balance=999, nonce=5)) + block_2 = _block_with_transactions( + previous_hash=block_1.hash().hex(), + height=2, + timestamp=1_739_990_130, + transactions=[ + create_coinbase_transaction( + miner_address="dd" * 20, + amount=50, + timestamp=1_739_990_130, + ) + ], + ) + + with pytest.raises(StorageError, match="head_hash"): + storage.persist_block_state_and_metadata( + block=block_2, + state=failing_state, + head_hash="invalid-hash", + ) + + assert storage.get_block_by_hash(block_2.hash().hex()) is None + loaded_state = storage.load_state() + metadata = storage.load_chain_metadata() + assert loaded_state.get_account("aa" * 20).balance == 10 + assert "cc" * 20 not in loaded_state.accounts + assert metadata == {"height": 1, "head_hash": block_1.hash().hex()} diff --git a/tests/test_transaction.py b/tests/test_transaction.py new file mode 100644 index 0000000..d5b2540 --- /dev/null +++ b/tests/test_transaction.py @@ -0,0 +1,85 @@ +"""Unit tests for transaction signing and verification.""" + +from __future__ import annotations + +from dataclasses import replace + +import pytest + +pytest.importorskip("nacl") + +from minichain.crypto import derive_address, generate_key_pair, serialize_verify_key +from minichain.transaction import COINBASE_SENDER, Transaction, create_coinbase_transaction + + +def _build_signed_transaction() -> tuple[Transaction, object]: + signing_key, verify_key = generate_key_pair() + tx = Transaction( + sender=derive_address(verify_key), + recipient="ab" * 20, + amount=25, + nonce=0, + fee=2, + timestamp=1_739_760_000, + ) + tx.sign(signing_key) + return tx, signing_key + + +def test_valid_transaction_signing_and_verification() -> None: + tx, _ = _build_signed_transaction() + + assert tx.verify() + + +def test_tampered_transaction_amount_is_rejected() -> 
None: + tx, _ = _build_signed_transaction() + tampered = replace(tx, amount=tx.amount + 1) + + assert not tampered.verify() + + +def test_tampered_transaction_recipient_is_rejected() -> None: + tx, _ = _build_signed_transaction() + tampered = replace(tx, recipient="cd" * 20) + + assert not tampered.verify() + + +def test_mismatched_public_key_and_sender_is_rejected() -> None: + tx, _ = _build_signed_transaction() + other_signing_key, other_verify_key = generate_key_pair() + _ = other_signing_key + tampered = replace(tx, public_key=serialize_verify_key(other_verify_key)) + + assert not tampered.verify() + + +def test_transaction_id_changes_when_signature_changes() -> None: + tx, _ = _build_signed_transaction() + original_id = tx.transaction_id() + tampered = replace(tx, signature="00" * 64) + + assert tampered.transaction_id() != original_id + + +def test_coinbase_transaction_verifies_without_signature() -> None: + tx = create_coinbase_transaction( + miner_address="ef" * 20, + amount=55, + timestamp=1_739_760_111, + ) + + assert tx.sender == COINBASE_SENDER + assert tx.verify() + + +def test_coinbase_with_auth_fields_is_rejected() -> None: + tx = create_coinbase_transaction( + miner_address="ef" * 20, + amount=55, + timestamp=1_739_760_111, + ) + tampered = replace(tx, signature="00" * 64) + + assert not tampered.verify() diff --git a/tests/test_transaction_gossip.py b/tests/test_transaction_gossip.py new file mode 100644 index 0000000..ae95d5f --- /dev/null +++ b/tests/test_transaction_gossip.py @@ -0,0 +1,103 @@ +"""Integration tests for transaction gossip propagation.""" + +from __future__ import annotations + +import asyncio + +from minichain.crypto import derive_address, generate_key_pair +from minichain.network import MiniChainNetwork, NetworkConfig +from minichain.transaction import Transaction + + +def test_transaction_gossip_propagates_across_three_nodes() -> None: + async def scenario() -> None: + seen_by_node: dict[str, list[str]] = {"a": [], "b": [], 
"c": []} + + def make_handler(node_name: str): + def handler(transaction: Transaction) -> bool: + seen_by_node[node_name].append(transaction.transaction_id().hex()) + return True + + return handler + + node_b = MiniChainNetwork( + NetworkConfig(host="127.0.0.1", port=0, node_id="node-b", enable_mdns=False) + ) + node_b.set_transaction_handler(make_handler("b")) + await node_b.start() + + node_a = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-a", + enable_mdns=False, + bootstrap_peers=(node_b.listen_address(),), + ) + ) + node_a.set_transaction_handler(make_handler("a")) + await node_a.start() + + node_c = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-c", + enable_mdns=False, + bootstrap_peers=(node_b.listen_address(),), + ) + ) + node_c.set_transaction_handler(make_handler("c")) + await node_c.start() + + try: + await node_a.wait_for_connected_peers(1, timeout=3.0) + await node_b.wait_for_connected_peers(2, timeout=3.0) + await node_c.wait_for_connected_peers(1, timeout=3.0) + + transaction = _build_signed_transaction() + assert await node_a.submit_transaction(transaction) + + await _wait_until( + lambda: len(seen_by_node["b"]) == 1 and len(seen_by_node["c"]) == 1, + timeout=3.0, + ) + assert len(seen_by_node["a"]) == 1 + assert len(seen_by_node["b"]) == 1 + assert len(seen_by_node["c"]) == 1 + + assert not await node_a.submit_transaction(transaction) + await asyncio.sleep(0.2) + assert len(seen_by_node["a"]) == 1 + assert len(seen_by_node["b"]) == 1 + assert len(seen_by_node["c"]) == 1 + finally: + await node_c.stop() + await node_a.stop() + await node_b.stop() + + asyncio.run(scenario()) + + +def _build_signed_transaction() -> Transaction: + signing_key, verify_key = generate_key_pair() + sender = derive_address(verify_key) + transaction = Transaction( + sender=sender, + recipient="11" * 20, + amount=25, + nonce=0, + fee=1, + timestamp=1_700_000_000, + ) + transaction.sign(signing_key) + return 
transaction + + +async def _wait_until(predicate, *, timeout: float) -> None: + deadline = asyncio.get_running_loop().time() + timeout + while asyncio.get_running_loop().time() < deadline: + if predicate(): + return + await asyncio.sleep(0.05) + raise TimeoutError("condition was not met before timeout")