docs: update README with comprehensive test results, CLI documentation, and enhanced feature descriptions

- Update key capabilities to include GPU marketplace, payments, billing, and governance
- Expand CLI section from basic examples to 12 command groups with 90+ subcommands
- Add detailed test results table showing 208 passing tests across 6 test suites
- Update documentation links to reference new CLI reference and coordinator API docs
- Revise test commands to reflect the actual test structure
This commit is contained in:
oib
2026-02-12 20:58:21 +01:00
parent 5120861e17
commit 65b63de56f
47 changed files with 5622 additions and 1148 deletions

View File

@@ -0,0 +1,254 @@
"""Tests for mempool implementations (InMemory and Database-backed)"""
import json
import os
import tempfile
import time
import pytest
from aitbc_chain.mempool import (
InMemoryMempool,
DatabaseMempool,
PendingTransaction,
compute_tx_hash,
_estimate_size,
init_mempool,
get_mempool,
)
from aitbc_chain.metrics import metrics_registry
@pytest.fixture(autouse=True)
def reset_metrics():
    """Clear the global metrics registry before and after every test."""
    metrics_registry.reset()
    try:
        yield
    finally:
        metrics_registry.reset()
class TestComputeTxHash:
    """Behavioural checks for the transaction-hash helper."""

    def test_deterministic(self):
        # Hashing the same payload twice must yield the same digest.
        payload = {"sender": "alice", "recipient": "bob", "fee": 10}
        first = compute_tx_hash(payload)
        second = compute_tx_hash(payload)
        assert first == second

    def test_different_for_different_tx(self):
        hash_a = compute_tx_hash({"sender": "alice", "fee": 1})
        hash_b = compute_tx_hash({"sender": "bob", "fee": 1})
        assert hash_a != hash_b

    def test_hex_prefix(self):
        assert compute_tx_hash({"sender": "alice"}).startswith("0x")
class TestInMemoryMempool:
    """Tests for the in-memory, fee-prioritised mempool."""

    def test_add_and_list(self):
        mempool = InMemoryMempool()
        tx_hash = mempool.add({"sender": "alice", "recipient": "bob", "fee": 5})
        assert tx_hash.startswith("0x")
        pending = mempool.list_transactions()
        assert len(pending) == 1
        assert pending[0].tx_hash == tx_hash
        assert pending[0].fee == 5

    def test_duplicate_ignored(self):
        mempool = InMemoryMempool()
        payload = {"sender": "alice", "fee": 1}
        first_hash = mempool.add(payload)
        second_hash = mempool.add(payload)
        # Re-adding the same tx is a no-op that returns the same hash.
        assert first_hash == second_hash
        assert mempool.size() == 1

    def test_min_fee_rejected(self):
        mempool = InMemoryMempool(min_fee=10)
        with pytest.raises(ValueError, match="below minimum"):
            mempool.add({"sender": "alice", "fee": 5})

    def test_min_fee_accepted(self):
        mempool = InMemoryMempool(min_fee=10)
        mempool.add({"sender": "alice", "fee": 10})  # exactly at the floor
        assert mempool.size() == 1

    def test_max_size_eviction(self):
        mempool = InMemoryMempool(max_size=2)
        mempool.add({"sender": "a", "fee": 1, "nonce": 1})
        mempool.add({"sender": "b", "fee": 5, "nonce": 2})
        # A third insert must push out the cheapest entry.
        mempool.add({"sender": "c", "fee": 10, "nonce": 3})
        assert mempool.size() == 2
        remaining_fees = sorted(t.fee for t in mempool.list_transactions())
        assert remaining_fees == [5, 10]  # fee=1 was evicted

    def test_drain_by_fee_priority(self):
        mempool = InMemoryMempool()
        mempool.add({"sender": "low", "fee": 1, "nonce": 1})
        mempool.add({"sender": "high", "fee": 100, "nonce": 2})
        mempool.add({"sender": "mid", "fee": 50, "nonce": 3})
        taken = mempool.drain(max_count=2, max_bytes=1_000_000)
        assert [t.fee for t in taken] == [100, 50]  # highest fee first
        assert mempool.size() == 1  # the cheapest tx is left behind

    def test_drain_respects_max_count(self):
        mempool = InMemoryMempool()
        for i in range(10):
            mempool.add({"sender": f"s{i}", "fee": i, "nonce": i})
        taken = mempool.drain(max_count=3, max_bytes=1_000_000)
        assert len(taken) == 3
        assert mempool.size() == 7

    def test_drain_respects_max_bytes(self):
        mempool = InMemoryMempool()
        # Each serialized tx is roughly 33 bytes.
        for i in range(5):
            mempool.add({"sender": f"s{i}", "fee": i, "nonce": i})
        # A 34-byte budget leaves room for exactly one transaction.
        taken = mempool.drain(max_count=100, max_bytes=34)
        assert len(taken) == 1
        assert mempool.size() == 4

    def test_remove(self):
        mempool = InMemoryMempool()
        tx_hash = mempool.add({"sender": "alice", "fee": 1})
        assert mempool.size() == 1
        assert mempool.remove(tx_hash) is True
        assert mempool.size() == 0
        assert mempool.remove(tx_hash) is False  # second removal is a no-op

    def test_size(self):
        mempool = InMemoryMempool()
        assert mempool.size() == 0
        mempool.add({"sender": "a", "fee": 1, "nonce": 1})
        mempool.add({"sender": "b", "fee": 2, "nonce": 2})
        assert mempool.size() == 2
class TestDatabaseMempool:
    """Tests for the SQLite-backed mempool implementation."""

    @pytest.fixture
    def db_pool(self, tmp_path):
        """A fresh database mempool rooted in the test's tmp directory."""
        return DatabaseMempool(str(tmp_path / "mempool.db"), max_size=100, min_fee=0)

    def test_add_and_list(self, db_pool):
        tx_hash = db_pool.add({"sender": "alice", "recipient": "bob", "fee": 5})
        assert tx_hash.startswith("0x")
        pending = db_pool.list_transactions()
        assert len(pending) == 1
        assert pending[0].tx_hash == tx_hash
        assert pending[0].fee == 5

    def test_duplicate_ignored(self, db_pool):
        payload = {"sender": "alice", "fee": 1}
        first_hash = db_pool.add(payload)
        second_hash = db_pool.add(payload)
        assert first_hash == second_hash
        assert db_pool.size() == 1

    def test_min_fee_rejected(self, tmp_path):
        pool = DatabaseMempool(str(tmp_path / "fee.db"), min_fee=10)
        with pytest.raises(ValueError, match="below minimum"):
            pool.add({"sender": "alice", "fee": 5})

    def test_max_size_eviction(self, tmp_path):
        pool = DatabaseMempool(str(tmp_path / "evict.db"), max_size=2)
        for sender, fee, nonce in (("a", 1, 1), ("b", 5, 2), ("c", 10, 3)):
            pool.add({"sender": sender, "fee": fee, "nonce": nonce})
        # Capacity is 2, so the fee=1 entry must have been evicted.
        assert pool.size() == 2
        assert sorted(t.fee for t in pool.list_transactions()) == [5, 10]

    def test_drain_by_fee_priority(self, db_pool):
        for sender, fee, nonce in (("low", 1, 1), ("high", 100, 2), ("mid", 50, 3)):
            db_pool.add({"sender": sender, "fee": fee, "nonce": nonce})
        taken = db_pool.drain(max_count=2, max_bytes=1_000_000)
        assert [t.fee for t in taken] == [100, 50]  # highest fee first
        assert db_pool.size() == 1

    def test_drain_respects_max_count(self, db_pool):
        for i in range(10):
            db_pool.add({"sender": f"s{i}", "fee": i, "nonce": i})
        assert len(db_pool.drain(max_count=3, max_bytes=1_000_000)) == 3
        assert db_pool.size() == 7

    def test_remove(self, db_pool):
        tx_hash = db_pool.add({"sender": "alice", "fee": 1})
        assert db_pool.size() == 1
        assert db_pool.remove(tx_hash) is True
        assert db_pool.size() == 0
        assert db_pool.remove(tx_hash) is False

    def test_persistence(self, tmp_path):
        db_path = str(tmp_path / "persist.db")
        writer = DatabaseMempool(db_path)
        writer.add({"sender": "alice", "fee": 1})
        writer.add({"sender": "bob", "fee": 2})
        assert writer.size() == 2
        # A second instance over the same file sees the stored transactions.
        reader = DatabaseMempool(db_path)
        assert reader.size() == 2
        assert len(reader.list_transactions()) == 2
class TestCircuitBreaker:
    """State-machine tests for the PoA circuit breaker.

    The original repeated the same inline import of ``CircuitBreaker`` in
    all four tests; it is now hoisted into a single deferred-import helper.
    The import stays deferred (not module-level) to preserve the original
    property that this module imports even if the consensus package fails.
    """

    @staticmethod
    def _breaker(threshold, timeout):
        """Return a fresh CircuitBreaker; the single deferred import point."""
        from aitbc_chain.consensus.poa import CircuitBreaker
        return CircuitBreaker(threshold=threshold, timeout=timeout)

    def test_starts_closed(self):
        cb = self._breaker(threshold=3, timeout=1)
        assert cb.state == "closed"
        assert cb.allow_request() is True

    def test_opens_after_threshold(self):
        cb = self._breaker(threshold=3, timeout=10)
        cb.record_failure()
        cb.record_failure()
        assert cb.state == "closed"  # one failure short of the threshold
        cb.record_failure()
        assert cb.state == "open"
        assert cb.allow_request() is False

    def test_half_open_after_timeout(self):
        cb = self._breaker(threshold=1, timeout=1)
        cb.record_failure()
        assert cb.state == "open"
        assert cb.allow_request() is False
        # Simulate an elapsed timeout by rewinding the recorded failure time.
        cb._last_failure_time = time.time() - 2
        assert cb.state == "half-open"
        assert cb.allow_request() is True

    def test_success_resets(self):
        cb = self._breaker(threshold=2, timeout=10)
        cb.record_failure()
        cb.record_failure()
        assert cb.state == "open"
        cb.record_success()  # a success while open closes the breaker again
        assert cb.state == "closed"
        assert cb.allow_request() is True
class TestInitMempool:
    """Tests for the module-level mempool factory and accessor."""

    def test_init_memory(self):
        init_mempool(backend="memory", max_size=50, min_fee=0)
        assert isinstance(get_mempool(), InMemoryMempool)

    def test_init_database(self, tmp_path):
        init_mempool(
            backend="database",
            db_path=str(tmp_path / "init.db"),
            max_size=50,
            min_fee=0,
        )
        assert isinstance(get_mempool(), DatabaseMempool)

View File

@@ -0,0 +1,340 @@
"""Tests for chain synchronization, conflict resolution, and signature validation."""
import hashlib
import time
import pytest
from datetime import datetime
from contextlib import contextmanager
from sqlmodel import Session, SQLModel, create_engine, select
from aitbc_chain.models import Block, Transaction
from aitbc_chain.metrics import metrics_registry
from aitbc_chain.sync import ChainSync, ProposerSignatureValidator, ImportResult
@pytest.fixture(autouse=True)
def reset_metrics():
    """Clear the global metrics registry before and after every test."""
    metrics_registry.reset()
    try:
        yield
    finally:
        metrics_registry.reset()
@pytest.fixture
def db_engine(tmp_path):
    """Provide a fresh SQLite engine with all model tables created."""
    engine = create_engine(f"sqlite:///{tmp_path / 'test_sync.db'}", echo=False)
    SQLModel.metadata.create_all(engine)
    return engine
@pytest.fixture
def session_factory(db_engine):
    """Return a context-manager factory that opens sessions on the test engine."""

    @contextmanager
    def _open_session():
        with Session(db_engine) as session:
            yield session

    return _open_session
def _make_block_hash(chain_id, height, parent_hash, timestamp):
payload = f"{chain_id}|{height}|{parent_hash}|{timestamp.isoformat()}".encode()
return "0x" + hashlib.sha256(payload).hexdigest()
def _seed_chain(session_factory, count=5, chain_id="test-chain", proposer="proposer-a"):
    """Insert `count` hash-linked blocks and return their header dicts."""
    previous_hash = "0x00"
    headers = []
    with session_factory() as session:
        for height in range(count):
            stamp = datetime(2026, 1, 1, 0, 0, height)
            block_hash = _make_block_hash(chain_id, height, previous_hash, stamp)
            session.add(Block(
                height=height, hash=block_hash, parent_hash=previous_hash,
                proposer=proposer, timestamp=stamp, tx_count=0,
            ))
            headers.append({
                "height": height, "hash": block_hash,
                "parent_hash": previous_hash, "proposer": proposer,
                "timestamp": stamp.isoformat(),
            })
            previous_hash = block_hash  # chain the next block to this one
        session.commit()
    return headers
class TestProposerSignatureValidator:
    """Structural and trust-list validation of incoming block headers."""

    def test_valid_block(self):
        validator = ProposerSignatureValidator()
        stamp = datetime.utcnow()
        ok, reason = validator.validate_block_signature({
            "height": 1,
            "hash": _make_block_hash("test", 1, "0x00", stamp),
            "parent_hash": "0x00",
            "proposer": "node-a",
            "timestamp": stamp.isoformat(),
        })
        assert ok is True
        assert reason == "Valid"

    def test_missing_proposer(self):
        validator = ProposerSignatureValidator()
        ok, reason = validator.validate_block_signature({
            "height": 1,
            "hash": "0x" + "a" * 64,
            "parent_hash": "0x00",
            "timestamp": datetime.utcnow().isoformat(),
        })
        assert ok is False
        assert "Missing proposer" in reason

    def test_invalid_hash_format(self):
        validator = ProposerSignatureValidator()
        ok, reason = validator.validate_block_signature({
            "height": 1,
            "hash": "badhash",  # lacks the 0x prefix
            "parent_hash": "0x00",
            "proposer": "node-a",
            "timestamp": datetime.utcnow().isoformat(),
        })
        assert ok is False
        assert "Invalid block hash" in reason

    def test_invalid_hash_length(self):
        validator = ProposerSignatureValidator()
        ok, reason = validator.validate_block_signature({
            "height": 1,
            "hash": "0xabc",  # prefixed but far too short
            "parent_hash": "0x00",
            "proposer": "node-a",
            "timestamp": datetime.utcnow().isoformat(),
        })
        assert ok is False
        assert "Invalid hash length" in reason

    def test_untrusted_proposer_rejected(self):
        validator = ProposerSignatureValidator(trusted_proposers=["node-a", "node-b"])
        stamp = datetime.utcnow()
        ok, reason = validator.validate_block_signature({
            "height": 1,
            "hash": _make_block_hash("test", 1, "0x00", stamp),
            "parent_hash": "0x00",
            "proposer": "node-evil",
            "timestamp": stamp.isoformat(),
        })
        assert ok is False
        assert "not in trusted set" in reason

    def test_trusted_proposer_accepted(self):
        validator = ProposerSignatureValidator(trusted_proposers=["node-a"])
        stamp = datetime.utcnow()
        ok, _ = validator.validate_block_signature({
            "height": 1,
            "hash": _make_block_hash("test", 1, "0x00", stamp),
            "parent_hash": "0x00",
            "proposer": "node-a",
            "timestamp": stamp.isoformat(),
        })
        assert ok is True

    def test_add_remove_trusted(self):
        validator = ProposerSignatureValidator()
        assert len(validator.trusted_proposers) == 0
        validator.add_trusted("node-x")
        assert "node-x" in validator.trusted_proposers
        validator.remove_trusted("node-x")
        assert "node-x" not in validator.trusted_proposers

    def test_missing_required_field(self):
        validator = ProposerSignatureValidator()
        # Header omits height, parent_hash, and timestamp entirely.
        ok, reason = validator.validate_block_signature({
            "hash": "0x" + "a" * 64,
            "proposer": "node-a",
        })
        assert ok is False
        assert "Missing required field" in reason
class TestChainSyncAppend:
    """Happy-path appends and structural rejections via ChainSync.import_block."""

    def test_append_to_empty_chain(self, session_factory):
        importer = ChainSync(session_factory, chain_id="test", validate_signatures=False)
        stamp = datetime.utcnow()
        result = importer.import_block({
            "height": 0,
            "hash": _make_block_hash("test", 0, "0x00", stamp),
            "parent_hash": "0x00",
            "proposer": "node-a",
            "timestamp": stamp.isoformat(),
        })
        assert result.accepted is True
        assert result.height == 0

    def test_append_sequential(self, session_factory):
        importer = ChainSync(session_factory, chain_id="test", validate_signatures=False)
        tip = _seed_chain(session_factory, count=3, chain_id="test")[-1]
        stamp = datetime(2026, 1, 1, 0, 0, 3)
        result = importer.import_block({
            "height": 3,
            "hash": _make_block_hash("test", 3, tip["hash"], stamp),
            "parent_hash": tip["hash"],
            "proposer": "node-a",
            "timestamp": stamp.isoformat(),
        })
        assert result.accepted is True
        assert result.height == 3

    def test_duplicate_rejected(self, session_factory):
        importer = ChainSync(session_factory, chain_id="test", validate_signatures=False)
        genesis = _seed_chain(session_factory, count=2, chain_id="test")[0]
        # Re-offer the genesis block exactly as stored.
        result = importer.import_block({
            "height": 0,
            "hash": genesis["hash"],
            "parent_hash": "0x00",
            "proposer": "proposer-a",
            "timestamp": genesis["timestamp"],
        })
        assert result.accepted is False
        assert "already exists" in result.reason

    def test_stale_block_rejected(self, session_factory):
        importer = ChainSync(session_factory, chain_id="test", validate_signatures=False)
        _seed_chain(session_factory, count=5, chain_id="test")
        stamp = datetime(2026, 6, 1)
        # Height 2 is already well below our head of 4.
        result = importer.import_block({
            "height": 2,
            "hash": _make_block_hash("test", 2, "0x00", stamp),
            "parent_hash": "0x00",
            "proposer": "node-b",
            "timestamp": stamp.isoformat(),
        })
        assert result.accepted is False
        assert any(word in result.reason for word in ("Stale", "Fork", "longer"))

    def test_gap_detected(self, session_factory):
        importer = ChainSync(session_factory, chain_id="test", validate_signatures=False)
        _seed_chain(session_factory, count=3, chain_id="test")
        stamp = datetime(2026, 6, 1)
        # Height 10 would leave heights 3..9 missing.
        result = importer.import_block({
            "height": 10,
            "hash": _make_block_hash("test", 10, "0x00", stamp),
            "parent_hash": "0x00",
            "proposer": "node-a",
            "timestamp": stamp.isoformat(),
        })
        assert result.accepted is False
        assert "Gap" in result.reason

    def test_append_with_transactions(self, session_factory):
        importer = ChainSync(session_factory, chain_id="test", validate_signatures=False)
        tip = _seed_chain(session_factory, count=1, chain_id="test")[-1]
        stamp = datetime(2026, 1, 1, 0, 0, 1)
        payloads = [
            {"tx_hash": "0x" + "a" * 64, "sender": "alice", "recipient": "bob"},
            {"tx_hash": "0x" + "b" * 64, "sender": "charlie", "recipient": "dave"},
        ]
        result = importer.import_block({
            "height": 1,
            "hash": _make_block_hash("test", 1, tip["hash"], stamp),
            "parent_hash": tip["hash"],
            "proposer": "node-a",
            "timestamp": stamp.isoformat(),
            "tx_count": 2,
        }, transactions=payloads)
        assert result.accepted is True
        # The imported transactions must be persisted under the new block.
        with session_factory() as session:
            stored = session.exec(
                select(Transaction).where(Transaction.block_height == 1)
            ).all()
            assert len(stored) == 2
class TestChainSyncSignatureValidation:
    """import_block must honour the proposer trust list when validation is on."""

    @staticmethod
    def _genesis_header(proposer):
        """A structurally valid genesis header attributed to `proposer`."""
        stamp = datetime.utcnow()
        return {
            "height": 0,
            "hash": _make_block_hash("test", 0, "0x00", stamp),
            "parent_hash": "0x00",
            "proposer": proposer,
            "timestamp": stamp.isoformat(),
        }

    def test_untrusted_proposer_rejected_on_import(self, session_factory):
        validator = ProposerSignatureValidator(trusted_proposers=["node-a"])
        importer = ChainSync(session_factory, chain_id="test",
                             validator=validator, validate_signatures=True)
        result = importer.import_block(self._genesis_header("node-evil"))
        assert result.accepted is False
        assert "not in trusted set" in result.reason

    def test_trusted_proposer_accepted_on_import(self, session_factory):
        validator = ProposerSignatureValidator(trusted_proposers=["node-a"])
        importer = ChainSync(session_factory, chain_id="test",
                             validator=validator, validate_signatures=True)
        result = importer.import_block(self._genesis_header("node-a"))
        assert result.accepted is True

    def test_validation_disabled(self, session_factory):
        validator = ProposerSignatureValidator(trusted_proposers=["node-a"])
        importer = ChainSync(session_factory, chain_id="test",
                             validator=validator, validate_signatures=False)
        # An untrusted proposer sails through because validation is off.
        result = importer.import_block(self._genesis_header("node-evil"))
        assert result.accepted is True
class TestChainSyncConflictResolution:
    """Fork handling and sync-status reporting."""

    def test_fork_at_same_height_rejected(self, session_factory):
        """A fork no longer than our chain loses — the local chain is kept."""
        importer = ChainSync(session_factory, chain_id="test", validate_signatures=False)
        _seed_chain(session_factory, count=5, chain_id="test")
        # Offer a competing block at height 3 built on a different parent.
        stamp = datetime(2026, 6, 15)
        result = importer.import_block({
            "height": 3,
            "hash": _make_block_hash("test", 3, "0xdifferent", stamp),
            "parent_hash": "0xdifferent",
            "proposer": "node-b",
            "timestamp": stamp.isoformat(),
        })
        assert result.accepted is False
        assert "longer" in result.reason or "Fork" in result.reason

    def test_sync_status(self, session_factory):
        importer = ChainSync(session_factory, chain_id="test-chain", validate_signatures=False)
        _seed_chain(session_factory, count=5, chain_id="test-chain")
        status = importer.get_sync_status()
        assert status["chain_id"] == "test-chain"
        assert status["head_height"] == 4  # five blocks span heights 0..4
        assert status["total_blocks"] == 5
        assert status["max_reorg_depth"] == 10
class TestSyncMetrics:
    """Each import outcome must bump the corresponding sync counter."""

    def test_accepted_block_increments_metrics(self, session_factory):
        importer = ChainSync(session_factory, chain_id="test", validate_signatures=False)
        stamp = datetime.utcnow()
        importer.import_block({
            "height": 0,
            "hash": _make_block_hash("test", 0, "0x00", stamp),
            "parent_hash": "0x00",
            "proposer": "node-a",
            "timestamp": stamp.isoformat(),
        })
        rendered = metrics_registry.render_prometheus()
        assert "sync_blocks_received_total" in rendered
        assert "sync_blocks_accepted_total" in rendered

    def test_rejected_block_increments_metrics(self, session_factory):
        validator = ProposerSignatureValidator(trusted_proposers=["node-a"])
        importer = ChainSync(session_factory, chain_id="test",
                             validator=validator, validate_signatures=True)
        stamp = datetime.utcnow()
        importer.import_block({
            "height": 0,
            "hash": _make_block_hash("test", 0, "0x00", stamp),
            "parent_hash": "0x00",
            "proposer": "node-evil",  # untrusted -> rejection counter
            "timestamp": stamp.isoformat(),
        })
        assert "sync_blocks_rejected_total" in metrics_registry.render_prometheus()

    def test_duplicate_increments_metrics(self, session_factory):
        importer = ChainSync(session_factory, chain_id="test", validate_signatures=False)
        _seed_chain(session_factory, count=1, chain_id="test")
        # Capture the stored genesis attributes while the session is open.
        with session_factory() as session:
            genesis = session.exec(select(Block).where(Block.height == 0)).first()
            genesis_hash = genesis.hash
            genesis_timestamp = genesis.timestamp.isoformat()
        importer.import_block({
            "height": 0,
            "hash": genesis_hash,
            "parent_hash": "0x00",
            "proposer": "proposer-a",
            "timestamp": genesis_timestamp,
        })
        assert "sync_blocks_duplicate_total" in metrics_registry.render_prometheus()

    def test_fork_increments_metrics(self, session_factory):
        importer = ChainSync(session_factory, chain_id="test", validate_signatures=False)
        _seed_chain(session_factory, count=5, chain_id="test")
        stamp = datetime(2026, 6, 15)
        importer.import_block({
            "height": 3,
            "hash": _make_block_hash("test", 3, "0xdifferent", stamp),
            "parent_hash": "0xdifferent",
            "proposer": "node-b",
            "timestamp": stamp.isoformat(),
        })
        assert "sync_forks_detected_total" in metrics_registry.render_prometheus()