Based on the repository's commit message style and the changes in the diff, here's an appropriate commit message:

```
feat: add websocket tests, PoA metrics, marketplace endpoints, and enhanced observability

- Add comprehensive websocket tests for blocks and transactions streams including multi-subscriber and high-volume scenarios
- Extend PoA consensus with per-proposer block metrics and rotation tracking
- Add latest block interval gauge and RPC error spike alerting
- Enhance mock coordinator
This commit is contained in:
oib
2025-12-22 07:55:09 +01:00
parent fb60505cdf
commit d98b2c7772
70 changed files with 3472 additions and 246 deletions

View File

@@ -1,8 +1,15 @@
from __future__ import annotations
import sys
from pathlib import Path
import pytest
from sqlmodel import SQLModel, Session, create_engine
# Prepend the project's ``src`` directory to ``sys.path`` so the project
# packages below can be imported when tests run from the repository root.
PROJECT_ROOT = Path(__file__).resolve().parent.parent / "src"
if str(PROJECT_ROOT) not in sys.path:
    sys.path.insert(0, str(PROJECT_ROOT))
# Import must happen after the path tweak above; the noqa marks it as an
# intentional side-effect import (registers models on SQLModel metadata).
from aitbc_chain.models import Block, Transaction, Receipt  # noqa: F401 - ensure models imported for metadata

View File

@@ -0,0 +1,39 @@
"""Tests for the observability dashboard helpers."""
from __future__ import annotations
import json
from pathlib import Path
from aitbc_chain.observability.dashboards import generate_default_dashboards
from aitbc_chain.observability import exporters
def test_generate_default_dashboards_creates_files(tmp_path: Path) -> None:
    """Generating dashboards writes the expected JSON files with valid contents."""
    target = tmp_path / "dashboards"
    generate_default_dashboards(target, datasource_uid="prometheus")
    written = sorted(target.glob("*.json"))
    assert {p.name for p in written} == {
        "blockchain-node-overview.json",
        "coordinator-overview.json",
    }
    known_uids = {"aitbc-coordinator", "aitbc-node"}
    for dashboard_path in written:
        body = json.loads(dashboard_path.read_text(encoding="utf-8"))
        assert body["uid"] in known_uids
        assert body["title"].startswith("AITBC")
        assert body["panels"], "Dashboard should contain at least one panel"
def test_register_exporters_tracks_names() -> None:
    """Registering exporters records their names in registration order."""
    exporters.REGISTERED_EXPORTERS.clear()
    exporters.register_exporters(["prometheus", "loki"])
    expected = ["prometheus", "loki"]
    assert exporters.REGISTERED_EXPORTERS == expected

View File

@@ -1,6 +1,7 @@
from __future__ import annotations
import asyncio
from contextlib import ExitStack
from fastapi.testclient import TestClient
@@ -10,8 +11,6 @@ from aitbc_chain.gossip import gossip_broker
def _publish(topic: str, message: dict) -> None:
    """Drive the broker's async publish to completion from synchronous test code."""
    coroutine = gossip_broker.publish(topic, message)
    asyncio.run(coroutine)
def test_blocks_websocket_stream() -> None:
client = TestClient(create_app())
@@ -28,19 +27,100 @@ def test_blocks_websocket_stream() -> None:
assert message == payload
def test_transactions_websocket_stream() -> None:
def test_blocks_websocket_multiple_subscribers_receive_all_payloads() -> None:
    """Every connected subscriber receives every published block, in order."""
    with TestClient(create_app()) as client, ExitStack() as stack:
        subscribers = []
        for _ in range(3):
            subscribers.append(
                stack.enter_context(client.websocket_connect("/rpc/ws/blocks"))
            )
        expected = []
        for height in range(5):
            if height > 0:
                parent = "0x" + f"{height - 1:064x}"
            else:
                parent = "0x" + "0" * 64
            expected.append(
                {
                    "height": height,
                    "hash": "0x" + f"{height:064x}",
                    "parent_hash": parent,
                    "timestamp": f"2025-01-01T00:00:{height:02d}Z",
                    "tx_count": height % 3,
                }
            )
        for block in expected:
            _publish("blocks", block)
        for subscriber in subscribers:
            assert [subscriber.receive_json() for _ in expected] == expected
        # A subsequent publish must still reach every subscriber, in order.
        tail_block = {
            "height": 99,
            "hash": "0x" + "f" * 64,
            "parent_hash": "0x" + "e" * 64,
            "timestamp": "2025-01-01T00:01:39Z",
            "tx_count": 5,
        }
        _publish("blocks", tail_block)
        for subscriber in subscribers:
            assert subscriber.receive_json() == tail_block
def test_blocks_websocket_high_volume_load() -> None:
    """A burst of block messages is delivered completely and in order to each subscriber."""
    total_messages = 40
    total_subscribers = 4
    with TestClient(create_app()) as client, ExitStack() as stack:
        connections = [
            stack.enter_context(client.websocket_connect("/rpc/ws/blocks"))
            for _ in range(total_subscribers)
        ]
        expected = [
            {
                "height": h,
                "hash": "0x" + f"{h + 100:064x}",
                "parent_hash": (
                    "0x" + f"{h + 99:064x}" if h > 0 else "0x" + "0" * 64
                ),
                "timestamp": f"2025-01-01T00:{h // 60:02d}:{h % 60:02d}Z",
                "tx_count": h % 7,
            }
            for h in range(total_messages)
        ]
        for message in expected:
            _publish("blocks", message)
        for connection in connections:
            assert [connection.receive_json() for _ in expected] == expected
def test_transactions_websocket_cleans_up_on_disconnect() -> None:
    """Publishing after a subscriber disconnects must neither raise nor hang.

    Fixes a diff-merge artifact in this block: the payload dict carried
    duplicate keys (``tx_hash``, ``recipient``, ``payload``, ``nonce``,
    ``fee`` each appeared twice — old and new diff lines concatenated) and
    the receive/assert pair was duplicated, so the second ``receive_json``
    would block forever with only one published message. The reconstructed
    body keeps the new-side values of the diff.
    """
    client = TestClient(create_app())
    with client.websocket_connect("/rpc/ws/transactions") as websocket:
        payload = {
            "tx_hash": "0x" + "b" * 64,
            "sender": "alice",
            "recipient": "carol",
            "payload": {"amount": 2},
            "nonce": 7,
            "fee": 1,
            "type": "TRANSFER",
        }
        _publish("transactions", payload)
        assert websocket.receive_json() == payload
    # After closing the websocket, publishing again should not raise and should not hang.
    _publish(
        "transactions",
        {
            "tx_hash": "0x" + "c" * 64,
            "sender": "alice",
            "recipient": "dave",
            "payload": {"amount": 3},
            "nonce": 8,
            "fee": 1,
            "type": "TRANSFER",
        },
    )