Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
*.cb2
.*.lb


## Intermediate documents:
*.dvi
*.xdv
Expand Down Expand Up @@ -332,3 +333,4 @@ __pycache__/
*.so
*bore.zip
*bore_bin
node_data/
48 changes: 36 additions & 12 deletions main.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,18 +19,22 @@
import argparse
import asyncio
import logging
import os
import re
import sys

from nacl.signing import SigningKey
from nacl.encoding import HexEncoder

from minichain import Transaction, Blockchain, Block, State, Mempool, P2PNetwork, mine_block
from minichain.persistence import save, load
from minichain.validators import is_valid_receiver


logger = logging.getLogger(__name__)

BURN_ADDRESS = "0" * 40
DATA_DIR = "node_data"


# ──────────────────────────────────────────────
Expand Down Expand Up @@ -82,12 +86,14 @@ def mine_and_process_block(chain, mempool, miner_pk):
mined_block = mine_block(block)

if chain.add_block(mined_block):
logger.info("✅ Block #%d mined and added (%d txs)", mined_block.index, len(mineable_txs))
if chain.add_block(mined_block):
logger.info("✅ Block #%d mined and added (%d txs)", mined_block.index, len(mineable_txs))
mempool.remove_transactions(mineable_txs)
mempool.remove_transactions(mineable_txs)
chain.state.credit_mining_reward(miner_pk)
return mined_block
else:
logger.error("Block rejected by chain")
logger.error("Block rejected by chain")
return None


Expand All @@ -108,13 +114,13 @@ async def handler(data):
for addr, acc in remote_accounts.items():
if addr not in chain.state.accounts:
chain.state.accounts[addr] = acc
logger.info("🔄 Synced account %s... (balance=%d)", addr[:12], acc.get("balance", 0))
logger.info("🔄 State sync complete — %d accounts", len(chain.state.accounts))
logger.info("Synced account %s... (balance=%d)", addr[:12], acc.get("balance", 0))
logger.info("State sync complete — %d accounts", len(chain.state.accounts))

elif msg_type == "tx":
tx = Transaction(**payload)
if mempool.add_transaction(tx):
logger.info("📥 Received tx from %s... (amount=%s)", tx.sender[:8], tx.amount)
logger.info("Received tx from %s... (amount=%s)", tx.sender[:8], tx.amount)

elif msg_type == "block":
txs_raw = payload.get("transactions", [])
Expand All @@ -132,7 +138,7 @@ async def handler(data):
block.hash = block_hash

if chain.add_block(block):
logger.info("📥 Received Block #%d — added to chain", block.index)
logger.info("Received Block #%d — added to chain", block.index)

# Apply mining reward for the remote miner (burn address as placeholder)
miner = payload.get("miner", BURN_ADDRESS)
Expand All @@ -141,7 +147,7 @@ async def handler(data):
# Drop only confirmed transactions so higher nonces can remain queued.
mempool.remove_transactions(block.transactions)
else:
logger.warning("📥 Received Block #%s — rejected", block.index)
logger.warning("Received Block #%s — rejected", block.index)

return handler

Expand Down Expand Up @@ -217,9 +223,9 @@ async def cli_loop(sk, pk, chain, mempool, network):

if mempool.add_transaction(tx):
await network.broadcast_transaction(tx)
print(f" Tx sent: {amount} coins {receiver[:12]}...")
print(f" Tx sent: {amount} coins -> {receiver[:12]}...")
else:
print(" Transaction rejected (invalid sig, duplicate, or mempool full).")
print(" Transaction rejected (invalid sig, duplicate, or mempool full).")

# ── mine ──
elif cmd == "mine":
Expand Down Expand Up @@ -275,7 +281,19 @@ async def run_node(port: int, connect_to: str | None, fund: int):
"""Boot the node, optionally connect to a peer, then enter the CLI."""
sk, pk = create_wallet()

chain = Blockchain()
# ── Load existing chain or start fresh ──────────────────────────────────
chain_file = os.path.join(DATA_DIR, "blockchain.json")
if os.path.exists(chain_file):
try:
chain = load(path=DATA_DIR)
logger.info("Loaded existing chain (%d blocks) from '%s'", len(chain.chain), DATA_DIR)
except Exception as e:
logger.warning("Could not load chain: %s — starting fresh.", e)
chain = Blockchain()
else:
chain = Blockchain()
logger.info("No saved chain found — starting fresh.")

mempool = Mempool()
network = P2PNetwork()

Expand All @@ -291,7 +309,7 @@ async def on_peer_connected(writer):
}) + "\n"
writer.write(sync_msg.encode())
await writer.drain()
logger.info("🔄 Sent state sync to new peer")
logger.info("Sent state sync to new peer")

network._on_peer_connected = on_peer_connected

Expand All @@ -300,7 +318,7 @@ async def on_peer_connected(writer):
# Fund this node's wallet so it can transact in the demo
if fund > 0:
chain.state.credit_mining_reward(pk, reward=fund)
logger.info("💰 Funded %s... with %d coins", pk[:12], fund)
logger.info("Funded %s... with %d coins", pk[:12], fund)

# Connect to a seed peer if requested
if connect_to:
Expand All @@ -313,6 +331,12 @@ async def on_peer_connected(writer):
try:
await cli_loop(sk, pk, chain, mempool, network)
finally:
try:
os.makedirs(DATA_DIR, exist_ok=True)
save(chain, path=DATA_DIR)
logger.info("Chain saved to '%s' (%d blocks)", DATA_DIR, len(chain.chain))
except Exception as e:
logger.error("Failed to save chain: %s", e)
await network.stop()


Expand Down
3 changes: 3 additions & 0 deletions minichain/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from .contract import ContractMachine
from .p2p import P2PNetwork
from .mempool import Mempool
from .persistence import save, load

__all__ = [
"mine_block",
Expand All @@ -18,4 +19,6 @@
"ContractMachine",
"P2PNetwork",
"Mempool",
"save",
"load",
]
145 changes: 145 additions & 0 deletions minichain/persistence.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,145 @@
"""
Chain persistence: save and load the blockchain and state to/from JSON.

Design:
- blockchain.json holds the full list of serialized blocks
- state.json holds the accounts dict

Usage:
from minichain.persistence import save, load

save(blockchain, path="data/")
blockchain = load(path="data/")
"""

import json
import os
import logging
from .block import Block
from .transaction import Transaction
from .chain import Blockchain

logger = logging.getLogger(__name__)

# File names (created inside the caller-supplied directory) for the two
# persisted artifacts: the serialized block list and the accounts dict.
_CHAIN_FILE = "blockchain.json"
_STATE_FILE = "state.json"


# Public API

def save(blockchain: Blockchain, path: str = ".") -> None:
    """
    Persist the blockchain and account state to two JSON files inside `path`.

    Writes blockchain.json (full serialized block list) and state.json
    (accounts dict) into the directory, creating it if necessary.

    Args:
        blockchain: The live Blockchain instance to save.
        path: Directory to write blockchain.json and state.json into.
    """
    os.makedirs(path, exist_ok=True)

    # Serialize once up front; the same data feeds both the file and the log.
    serialized_blocks = [blk.to_dict() for blk in blockchain.chain]
    accounts = blockchain.state.accounts

    _write_json(os.path.join(path, _CHAIN_FILE), serialized_blocks)
    _write_json(os.path.join(path, _STATE_FILE), accounts)

    logger.info(
        "Saved %d blocks and %d accounts to '%s'",
        len(serialized_blocks),
        len(accounts),
        path,
    )


def load(path: str = ".") -> Blockchain:
    """
    Restore a Blockchain from JSON files inside `path`.

    Returns a fully initialised Blockchain whose chain and state match
    what was previously saved with save().

    Raises:
        FileNotFoundError: if blockchain.json or state.json are missing.
        ValueError: if the data is structurally invalid.
    """
    chain_path = os.path.join(path, _CHAIN_FILE)
    state_path = os.path.join(path, _STATE_FILE)

    raw_blocks = _read_json(chain_path)
    raw_accounts = _read_json(state_path)

    if not (isinstance(raw_blocks, list) and raw_blocks):
        raise ValueError(f"Invalid or empty chain data in '{chain_path}'")

    # FIX: validate raw_accounts is a dict before use
    if not isinstance(raw_accounts, dict):
        raise ValueError(f"Invalid accounts data in '{state_path}'")

    # Local imports: deferred here (as in the original) to avoid any
    # module-level import cycles between persistence and state/contract.
    import threading
    from .state import State
    from .contract import ContractMachine

    # Bypass __init__ so no fresh genesis block is mined; every field is
    # restored from disk instead.
    restored = Blockchain.__new__(Blockchain)
    restored._lock = threading.RLock()
    restored.chain = [_deserialize_block(raw) for raw in raw_blocks]

    # State is rebuilt the same way: skip __init__, attach the loaded
    # accounts, then bind a fresh ContractMachine to that state.
    state = State.__new__(State)
    state.accounts = raw_accounts
    state.contract_machine = ContractMachine(state)
    restored.state = state

    logger.info(
        "Loaded %d blocks and %d accounts from '%s'",
        len(restored.chain),
        len(restored.state.accounts),
        path,
    )
    return restored


# Helpers

def _write_json(filepath: str, data) -> None:
    """Atomically write `data` as pretty-printed JSON to `filepath`.

    The payload is first written to a sibling ``.tmp`` file and then renamed
    over the target. ``os.replace`` is atomic on both POSIX and Windows, so a
    crash mid-write can never leave a truncated/corrupt JSON file behind —
    which would otherwise make the next `load()` fail.
    """
    tmp_path = filepath + ".tmp"
    try:
        with open(tmp_path, "w", encoding="utf-8") as f:
            json.dump(data, f, indent=2)
        os.replace(tmp_path, filepath)
    except BaseException:
        # Best-effort cleanup of the orphaned temp file; re-raise the cause.
        try:
            os.unlink(tmp_path)
        except OSError:
            pass
        raise


def _read_json(filepath: str):
    """Load and return the JSON document stored at `filepath`.

    Uses EAFP (attempt the open, handle the failure) instead of a separate
    ``os.path.exists`` check, which is race-prone: the file could vanish
    between the check and the open.

    Raises:
        FileNotFoundError: if the file does not exist.
        json.JSONDecodeError: if the file contents are not valid JSON.
    """
    try:
        with open(filepath, "r", encoding="utf-8") as f:
            return json.load(f)
    except FileNotFoundError:
        # Same message as before; `from None` hides the redundant cause.
        raise FileNotFoundError(f"Persistence file not found: '{filepath}'") from None


def _deserialize_block(data: dict) -> Block:
    """Reconstruct a Block (including its transactions) from a plain dict."""
    txs = []
    for raw_tx in data.get("transactions", []):
        txs.append(
            Transaction(
                sender=raw_tx["sender"],
                receiver=raw_tx["receiver"],
                amount=raw_tx["amount"],
                nonce=raw_tx["nonce"],
                data=raw_tx.get("data"),
                signature=raw_tx.get("signature"),
                timestamp=raw_tx["timestamp"],
            )
        )

    restored = Block(
        index=data["index"],
        previous_hash=data["previous_hash"],
        transactions=txs,
        timestamp=data["timestamp"],
        difficulty=data.get("difficulty"),
    )

    # Trust the persisted proof-of-work fields rather than re-mining.
    restored.nonce = data["nonce"]
    restored.hash = data["hash"]
    # Preserve the stored merkle root rather than recomputing to guard against
    # any future change in the hash algorithm.
    restored.merkle_root = data.get("merkle_root")
    return restored

Loading