feat: add foreign key constraints and metrics for blockchain node
@@ -12,23 +12,85 @@ import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = "80bc0020bde2"
down_revision: Union[str, Sequence[str], None] = "e31f486f1484"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # Tables are recreated rather than altered in place (SQLite cannot add a
    # foreign key via ALTER TABLE), replacing the auto-generated
    # op.create_foreign_key() calls.

    # Recreate transaction table with foreign key to block.height
    op.drop_table("transaction")
    op.create_table(
        "transaction",
        sa.Column("id", sa.Integer(), primary_key=True, nullable=False),
        sa.Column("tx_hash", sa.String(), nullable=False),
        sa.Column("block_height", sa.Integer(), sa.ForeignKey("block.height"), nullable=True),
        sa.Column("sender", sa.String(), nullable=False),
        sa.Column("recipient", sa.String(), nullable=False),
        sa.Column("payload", sa.JSON(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
    )
    op.create_index("ix_transaction_tx_hash", "transaction", ["tx_hash"], unique=True)
    op.create_index("ix_transaction_block_height", "transaction", ["block_height"], unique=False)
    op.create_index("ix_transaction_created_at", "transaction", ["created_at"], unique=False)

    # Recreate receipt table with foreign key to block.height
    op.drop_table("receipt")
    op.create_table(
        "receipt",
        sa.Column("id", sa.Integer(), primary_key=True, nullable=False),
        sa.Column("job_id", sa.String(), nullable=False),
        sa.Column("receipt_id", sa.String(), nullable=False),
        sa.Column("block_height", sa.Integer(), sa.ForeignKey("block.height"), nullable=True),
        sa.Column("payload", sa.JSON(), nullable=False),
        sa.Column("miner_signature", sa.JSON(), nullable=False),
        sa.Column("coordinator_attestations", sa.JSON(), nullable=False),
        sa.Column("minted_amount", sa.Integer(), nullable=True),
        sa.Column("recorded_at", sa.DateTime(), nullable=False),
    )
    op.create_index("ix_receipt_job_id", "receipt", ["job_id"], unique=False)
    op.create_index("ix_receipt_receipt_id", "receipt", ["receipt_id"], unique=True)
    op.create_index("ix_receipt_block_height", "receipt", ["block_height"], unique=False)
    op.create_index("ix_receipt_recorded_at", "receipt", ["recorded_at"], unique=False)


def downgrade() -> None:
    """Downgrade schema."""
    # Revert receipt table without foreign key
    op.drop_table("receipt")
    op.create_table(
        "receipt",
        sa.Column("id", sa.Integer(), primary_key=True, nullable=False),
        sa.Column("job_id", sa.String(), nullable=False),
        sa.Column("receipt_id", sa.String(), nullable=False),
        sa.Column("block_height", sa.Integer(), nullable=True),
        sa.Column("payload", sa.JSON(), nullable=False),
        sa.Column("miner_signature", sa.JSON(), nullable=False),
        sa.Column("coordinator_attestations", sa.JSON(), nullable=False),
        sa.Column("minted_amount", sa.Integer(), nullable=True),
        sa.Column("recorded_at", sa.DateTime(), nullable=False),
    )
    op.create_index("ix_receipt_job_id", "receipt", ["job_id"], unique=False)
    op.create_index("ix_receipt_receipt_id", "receipt", ["receipt_id"], unique=True)
    op.create_index("ix_receipt_block_height", "receipt", ["block_height"], unique=False)
    op.create_index("ix_receipt_recorded_at", "receipt", ["recorded_at"], unique=False)

    # Revert transaction table without foreign key
    op.drop_table("transaction")
    op.create_table(
        "transaction",
        sa.Column("id", sa.Integer(), primary_key=True, nullable=False),
        sa.Column("tx_hash", sa.String(), nullable=False),
        sa.Column("block_height", sa.Integer(), nullable=True),
        sa.Column("sender", sa.String(), nullable=False),
        sa.Column("recipient", sa.String(), nullable=False),
        sa.Column("payload", sa.JSON(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
    )
    op.create_index("ix_transaction_tx_hash", "transaction", ["tx_hash"], unique=True)
    op.create_index("ix_transaction_block_height", "transaction", ["block_height"], unique=False)
    op.create_index("ix_transaction_created_at", "transaction", ["created_at"], unique=False)
apps/blockchain-node/observability/README.md (new file, 43 lines)
@@ -0,0 +1,43 @@
# Blockchain Node Observability

This directory contains Prometheus and Grafana assets for the devnet environment. The stack relies on the HTTP `/metrics` endpoints exposed by:

1. The blockchain node API (`http://127.0.0.1:8080/metrics`).
2. The mock coordinator/miner exporter (`http://127.0.0.1:8090/metrics`).
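
Before pointing Prometheus at these targets, it can help to confirm they respond. A minimal sketch (not part of this commit; it assumes the default devnet ports above and the `httpx` dependency already used by the repo's scripts):

```python
# check_metrics.py - hypothetical helper: fetch both /metrics endpoints
# and report how many exposition lines each one returns.
import httpx

ENDPOINTS = {
    "blockchain-node": "http://127.0.0.1:8080/metrics",
    "mock-coordinator": "http://127.0.0.1:8090/metrics",
}

for name, url in ENDPOINTS.items():
    try:
        response = httpx.get(url, timeout=5.0)
        response.raise_for_status()
        print(f"{name}: {len(response.text.splitlines())} lines of metrics")
    except httpx.HTTPError as exc:
        print(f"{name}: unreachable ({exc})")
```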

## Files

- `prometheus.yml` – Scrapes both blockchain node and mock coordinator/miner metrics.
- `grafana-dashboard.json` – Panels for block interval, RPC throughput, miner activity, coordinator receipt flow, **plus new gossip queue, subscriber, and publication rate panels**.
- `alerts.yml` – Prometheus alerting rules (loaded via `rule_files` in `prometheus.yml`) highlighting proposer stalls, miner errors, and coordinator receipt drop-offs.
- `gossip-recording-rules.yml` – Prometheus recording rules that derive queue/subscriber gauges and publication rates from gossip metrics.

## Usage

```bash
# Launch Prometheus using the sample config
prometheus --config.file=apps/blockchain-node/observability/prometheus.yml

# Import grafana-dashboard.json into Grafana via the UI (Dashboards -> Import)

# alerts.yml holds Prometheus alerting rules, not an Alertmanager config;
# run an Alertmanager instance on 127.0.0.1:9093 with your own routing
# config to receive the fired alerts
alertmanager --config.file=<your-alertmanager-config>.yml

# Reload Prometheus and Alertmanager after tuning thresholds
kill -HUP $(pgrep prometheus)
kill -HUP $(pgrep alertmanager)
```

> **Tip:** The devnet helper `scripts/devnet_up.sh` seeds the metrics endpoints. After running it, both scrape targets will begin emitting data in under a minute.

## Gossip Observability

Recent updates instrumented the gossip broker with Prometheus counters and gauges. Key metrics surfaced via the recording rules and dashboard include:

- `gossip_publications_rate_per_sec` and `gossip_broadcast_publications_rate_per_sec` – per-second publication throughput for the in-memory and broadcast backends.
- `gossip_publications_topic_rate_per_sec` – topic-level publication rate time series (Grafana panel “Gossip Publication Rate by Topic”).
- `gossip_queue_size_by_topic` – instantaneous queue depth per topic (“Gossip Queue Depth by Topic”).
- `gossip_subscribers_by_topic`, `gossip_subscribers_total`, `gossip_broadcast_subscribers_total` – subscriber counts (“Gossip Subscriber Counts”).

Use these panels to monitor convergence and back-pressure during load tests (for example with `scripts/ws_load_test.py`) when running against a Redis-backed broadcast backend.
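
For quick checks without Grafana, the same gauges can be read straight off the node's `/metrics` endpoint. A small sketch (not part of this commit; it assumes the default node port and the `gossip_queue_size_<topic>` gauge naming described above):

```python
# watch_gossip.py - hypothetical helper: sample gossip queue-depth gauges
# from the node's Prometheus exposition text every few seconds.
import re
import time

import httpx

QUEUE_GAUGE = re.compile(r"^(gossip_queue_size_\w+)\s+(\S+)$", re.MULTILINE)

for _ in range(10):
    text = httpx.get("http://127.0.0.1:8080/metrics", timeout=5.0).text
    for name, value in QUEUE_GAUGE.findall(text):
        print(f"{name} = {value}")
    time.sleep(5)
```

Rising queue depths while `scripts/ws_load_test.py` runs indicate subscribers are falling behind the publishers.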

apps/blockchain-node/observability/alerts.yml (new file, 43 lines)
@@ -0,0 +1,43 @@
groups:
  - name: blockchain-node
    rules:
      - alert: BlockProposalStalled
        expr: (block_interval_seconds_sum / block_interval_seconds_count) > 5
        for: 1m
        labels:
          severity: warning
        annotations:
          summary: "Block production interval exceeded 5s"
          description: |
            Average block interval is {{ $value }} seconds, exceeding the expected cadence.

      - alert: BlockProposalDown
        expr: (block_interval_seconds_sum / block_interval_seconds_count) > 10
        for: 2m
        labels:
          severity: critical
        annotations:
          summary: "Block production halted"
          description: |
            Block intervals have spiked above 10 seconds for more than two minutes.
            Check proposer loop and database state.

      - alert: MinerErrorsDetected
        expr: miner_error_rate > 0
        for: 1m
        labels:
          severity: critical
        annotations:
          summary: "Miner mock reporting errors"
          description: |
            The miner mock error gauge is {{ $value }}. Investigate miner telemetry.

      - alert: CoordinatorReceiptDrop
        expr: rate(miner_receipts_attested_total[5m]) == 0
        for: 5m
        labels:
          severity: warning
        annotations:
          summary: "No receipts attested in 5 minutes"
          description: |
            Receipt attestations ceased during the last five minutes. Inspect coordinator connectivity.
apps/blockchain-node/observability/gossip-recording-rules.yml (new file, 36 lines)
@@ -0,0 +1,36 @@
groups:
  - name: gossip_metrics
    interval: 15s
    rules:
      - record: gossip_publications_rate_per_sec
        expr: rate(gossip_publications_total[1m])

      - record: gossip_broadcast_publications_rate_per_sec
        expr: rate(gossip_broadcast_publications_total[1m])

      - record: gossip_publications_topic_rate_per_sec
        expr: |
          label_replace(
            rate({__name__=~"gossip_publications_topic_.*"}[1m]),
            "topic",
            "$1",
            "__name__",
            "gossip_publications_topic_(.*)"
          )

      - record: gossip_queue_size_by_topic
        expr: |
          label_replace(
            {__name__=~"gossip_queue_size_.*"},
            "topic",
            "$1",
            "__name__",
            "gossip_queue_size_(.*)"
          )

      - record: gossip_subscribers_by_topic
        expr: |
          label_replace(
            {__name__=~"gossip_subscribers_topic_.*"},
            "topic",
            "$1",
            "__name__",
            "gossip_subscribers_topic_(.*)"
          )
apps/blockchain-node/observability/grafana-dashboard.json (new file, 377 lines)
@@ -0,0 +1,377 @@
{
  "annotations": {
    "list": [
      {
        "builtIn": 1,
        "datasource": { "type": "grafana", "uid": "grafana" },
        "enable": true,
        "hide": true,
        "iconColor": "rgba(0, 211, 255, 1)",
        "name": "Annotations & Alerts",
        "type": "dashboard"
      }
    ]
  },
  "description": "AITBC devnet observability for blockchain node, coordinator, and miner mock.",
  "editable": true,
  "fiscalYearStartMonth": 0,
  "gnetId": null,
  "graphTooltip": 0,
  "id": null,
  "iteration": 1727420700000,
  "links": [],
  "liveNow": false,
  "panels": [
    {
      "datasource": { "type": "prometheus", "uid": "PROMETHEUS_DS" },
      "fieldConfig": { "defaults": { "custom": {}, "unit": "s" }, "overrides": [] },
      "gridPos": { "h": 8, "w": 12, "x": 0, "y": 0 },
      "id": 1,
      "options": {
        "legend": { "calcs": [], "displayMode": "list", "placement": "bottom", "showLegend": true }
      },
      "targets": [
        { "expr": "block_interval_seconds_sum / block_interval_seconds_count", "legendFormat": "avg block interval", "refId": "A" }
      ],
      "title": "Block Interval (seconds)",
      "type": "timeseries"
    },
    {
      "datasource": { "type": "prometheus", "uid": "PROMETHEUS_DS" },
      "fieldConfig": { "defaults": { "custom": {}, "unit": "ops" }, "overrides": [] },
      "gridPos": { "h": 8, "w": 12, "x": 12, "y": 0 },
      "id": 2,
      "options": {
        "legend": { "calcs": ["lastNotNull"], "displayMode": "table", "placement": "bottom", "showLegend": true }
      },
      "targets": [
        { "expr": "rate(rpc_send_tx_total[5m])", "legendFormat": "sendTx", "refId": "A" },
        { "expr": "rate(rpc_submit_receipt_total[5m])", "legendFormat": "submitReceipt", "refId": "B" },
        { "expr": "rate(rpc_get_head_total[5m])", "legendFormat": "getHead", "refId": "C" }
      ],
      "title": "RPC Throughput",
      "type": "timeseries"
    },
    {
      "datasource": { "type": "prometheus", "uid": "PROMETHEUS_DS" },
      "fieldConfig": { "defaults": { "custom": {} }, "overrides": [] },
      "gridPos": { "h": 8, "w": 12, "x": 0, "y": 8 },
      "id": 3,
      "options": {
        "legend": { "calcs": ["lastNotNull"], "displayMode": "table", "placement": "bottom", "showLegend": true }
      },
      "targets": [
        { "expr": "miner_active_jobs", "legendFormat": "active jobs", "refId": "A" },
        { "expr": "miner_error_rate", "legendFormat": "error gauge", "refId": "B" }
      ],
      "title": "Miner Activity",
      "type": "timeseries"
    },
    {
      "datasource": { "type": "prometheus", "uid": "PROMETHEUS_DS" },
      "fieldConfig": { "defaults": { "custom": {}, "unit": "short" }, "overrides": [] },
      "gridPos": { "h": 8, "w": 12, "x": 12, "y": 8 },
      "id": 4,
      "options": {
        "legend": { "calcs": [], "displayMode": "list", "placement": "bottom", "showLegend": true }
      },
      "targets": [
        { "expr": "rate(miner_receipts_attested_total[5m])", "legendFormat": "receipts attested", "refId": "A" },
        { "expr": "rate(miner_receipts_unknown_total[5m])", "legendFormat": "unknown receipts", "refId": "B" }
      ],
      "title": "Coordinator Receipt Flow",
      "type": "timeseries"
    },
    {
      "datasource": { "type": "prometheus", "uid": "PROMETHEUS_DS" },
      "fieldConfig": { "defaults": { "custom": {} }, "overrides": [] },
      "gridPos": { "h": 8, "w": 12, "x": 0, "y": 16 },
      "id": 5,
      "options": {
        "legend": { "calcs": ["lastNotNull"], "displayMode": "table", "placement": "bottom", "showLegend": true }
      },
      "targets": [
        { "expr": "gossip_queue_size_by_topic", "legendFormat": "{{topic}}", "refId": "A" }
      ],
      "title": "Gossip Queue Depth by Topic",
      "type": "timeseries"
    },
    {
      "datasource": { "type": "prometheus", "uid": "PROMETHEUS_DS" },
      "fieldConfig": { "defaults": { "custom": {} }, "overrides": [] },
      "gridPos": { "h": 8, "w": 12, "x": 12, "y": 16 },
      "id": 6,
      "options": {
        "legend": { "calcs": ["lastNotNull"], "displayMode": "table", "placement": "bottom", "showLegend": true }
      },
      "targets": [
        { "expr": "gossip_subscribers_by_topic", "legendFormat": "{{topic}}", "refId": "A" },
        { "expr": "gossip_subscribers_total", "legendFormat": "total subscribers", "refId": "B" },
        { "expr": "gossip_broadcast_subscribers_total", "legendFormat": "broadcast subscribers", "refId": "C" }
      ],
      "title": "Gossip Subscriber Counts",
      "type": "timeseries"
    },
    {
      "datasource": { "type": "prometheus", "uid": "PROMETHEUS_DS" },
      "fieldConfig": { "defaults": { "custom": {}, "unit": "ops" }, "overrides": [] },
      "gridPos": { "h": 8, "w": 12, "x": 0, "y": 24 },
      "id": 7,
      "options": {
        "legend": { "calcs": ["lastNotNull"], "displayMode": "table", "placement": "bottom", "showLegend": true }
      },
      "targets": [
        { "expr": "gossip_publications_rate_per_sec", "legendFormat": "memory backend", "refId": "A" },
        { "expr": "gossip_broadcast_publications_rate_per_sec", "legendFormat": "broadcast backend", "refId": "B" }
      ],
      "title": "Gossip Publication Rate (total)",
      "type": "timeseries"
    },
    {
      "datasource": { "type": "prometheus", "uid": "PROMETHEUS_DS" },
      "fieldConfig": { "defaults": { "custom": {}, "unit": "ops" }, "overrides": [] },
      "gridPos": { "h": 8, "w": 12, "x": 12, "y": 24 },
      "id": 8,
      "options": {
        "legend": { "calcs": ["lastNotNull"], "displayMode": "table", "placement": "bottom", "showLegend": true }
      },
      "targets": [
        { "expr": "gossip_publications_topic_rate_per_sec", "legendFormat": "{{topic}}", "refId": "A" }
      ],
      "title": "Gossip Publication Rate by Topic",
      "type": "timeseries"
    }
  ],
  "refresh": "10s",
  "schemaVersion": 39,
  "style": "dark",
  "tags": ["aitbc", "blockchain-node"],
  "templating": { "list": [] },
  "time": { "from": "now-30m", "to": "now" },
  "timepicker": {},
  "timezone": "",
  "title": "AITBC Blockchain Node",
  "uid": null,
  "version": 1,
  "weekStart": ""
}
apps/blockchain-node/observability/prometheus.yml (new file, 28 lines)
@@ -0,0 +1,28 @@
global:
  scrape_interval: 5s
  evaluation_interval: 10s

alerting:
  alertmanagers:
    - static_configs:
        - targets:
            - "127.0.0.1:9093"

scrape_configs:
  - job_name: "blockchain-node"
    static_configs:
      - targets:
          - "127.0.0.1:8080"
        labels:
          service: "blockchain-node"

  - job_name: "mock-coordinator"
    static_configs:
      - targets:
          - "127.0.0.1:8090"
        labels:
          service: "mock-coordinator"

rule_files:
  - alerts.yml
  - gossip-recording-rules.yml
@@ -3,9 +3,14 @@

from __future__ import annotations

import random
import time
from typing import Dict

from fastapi import FastAPI
from fastapi.responses import PlainTextResponse

from aitbc_chain.metrics import metrics_registry

app = FastAPI(title="Mock Coordinator API", version="0.1.0")

@@ -15,6 +20,17 @@ MOCK_JOBS: Dict[str, Dict[str, str]] = {
}


def _simulate_miner_metrics() -> None:
    metrics_registry.set_gauge("miner_active_jobs", float(random.randint(0, 5)))
    metrics_registry.set_gauge("miner_error_rate", float(random.randint(0, 1)))
    metrics_registry.observe("miner_job_duration_seconds", random.uniform(1.0, 5.0))


@app.on_event("startup")
async def _startup() -> None:
    _simulate_miner_metrics()


@app.get("/health")
def health() -> Dict[str, str]:
    return {"status": "ok"}

@@ -24,15 +40,23 @@ def health() -> Dict[str, str]:
def attest_receipt(payload: Dict[str, str]) -> Dict[str, str | bool]:
    job_id = payload.get("job_id")
    if job_id in MOCK_JOBS:
        metrics_registry.increment("miner_receipts_attested_total")
        return {
            "exists": True,
            "paid": True,
            "not_double_spent": True,
            "quote": MOCK_JOBS[job_id],
        }
    metrics_registry.increment("miner_receipts_unknown_total")
    return {
        "exists": False,
        "paid": False,
        "not_double_spent": False,
        "quote": {},
    }


@app.get("/metrics", response_class=PlainTextResponse)
def metrics() -> str:
    metrics_registry.observe("miner_metrics_scrape_duration_seconds", random.uniform(0.001, 0.01))
    return metrics_registry.render_prometheus()
apps/blockchain-node/scripts/ws_load_test.py (new file, 224 lines)
@@ -0,0 +1,224 @@
#!/usr/bin/env python3
"""Asynchronous load harness for blockchain-node WebSocket + gossip pipeline."""

from __future__ import annotations

import argparse
import asyncio
import json
import random
import time
from dataclasses import dataclass, field
from typing import Dict, List, Optional

import httpx
import websockets


@dataclass
class PublishStats:
    sent: int = 0
    failed: int = 0
    latencies: List[float] = field(default_factory=list)

    @property
    def average_latency_ms(self) -> Optional[float]:
        if not self.latencies:
            return None
        return (sum(self.latencies) / len(self.latencies)) * 1000.0

    @property
    def p95_latency_ms(self) -> Optional[float]:
        if not self.latencies:
            return None
        sorted_latencies = sorted(self.latencies)
        index = int(len(sorted_latencies) * 0.95)
        index = min(index, len(sorted_latencies) - 1)
        return sorted_latencies[index] * 1000.0


@dataclass
class SubscriptionStats:
    messages: int = 0
    disconnects: int = 0


async def _publish_transactions(
    base_url: str,
    stats: PublishStats,
    stop_event: asyncio.Event,
    rate_hz: float,
    job_id: str,
    client_id: str,
    timeout: float,
) -> None:
    interval = 1 / rate_hz if rate_hz > 0 else 0
    async with httpx.AsyncClient(base_url=base_url, timeout=timeout) as client:
        while not stop_event.is_set():
            payload = {
                "type": "TRANSFER",
                "sender": f"miner-{client_id}",
                "nonce": stats.sent,
                "fee": 1,
                "payload": {
                    "job_id": job_id,
                    "amount": random.randint(1, 10),
                    "timestamp": time.time_ns(),
                },
            }
            started = time.perf_counter()
            try:
                response = await client.post("/rpc/sendTx", json=payload)
                response.raise_for_status()
            except httpx.HTTPError:
                stats.failed += 1
            else:
                stats.sent += 1
                stats.latencies.append(time.perf_counter() - started)

            if interval:
                try:
                    await asyncio.wait_for(stop_event.wait(), timeout=interval)
                except asyncio.TimeoutError:
                    continue
            else:
                await asyncio.sleep(0)


async def _subscription_worker(
    websocket_url: str,
    stats: SubscriptionStats,
    stop_event: asyncio.Event,
    client_name: str,
) -> None:
    while not stop_event.is_set():
        try:
            async with websockets.connect(websocket_url) as ws:
                while not stop_event.is_set():
                    try:
                        message = await asyncio.wait_for(ws.recv(), timeout=1.0)
                    except asyncio.TimeoutError:
                        continue
                    except websockets.ConnectionClosed:
                        stats.disconnects += 1
                        break
                    else:
                        _ = message  # lightweight backpressure test only
                        stats.messages += 1
        except OSError:
            stats.disconnects += 1
            await asyncio.sleep(0.5)


async def run_load(args: argparse.Namespace) -> None:
    stop_event = asyncio.Event()

    publish_stats: List[PublishStats] = [PublishStats() for _ in range(args.publishers)]
    subscription_stats: Dict[str, SubscriptionStats] = {
        "blocks": SubscriptionStats(),
        "transactions": SubscriptionStats(),
    }

    publisher_tasks = [
        asyncio.create_task(
            _publish_transactions(
                base_url=args.http_base,
                stats=publish_stats[i],
                stop_event=stop_event,
                rate_hz=args.publish_rate,
                job_id=f"load-test-job-{i}",
                client_id=f"{i}",
                timeout=args.http_timeout,
            ),
            name=f"publisher-{i}",
        )
        for i in range(args.publishers)
    ]

    subscriber_tasks = [
        asyncio.create_task(
            _subscription_worker(
                websocket_url=f"{args.ws_base}/blocks",
                stats=subscription_stats["blocks"],
                stop_event=stop_event,
                client_name="blocks",
            ),
            name="subscriber-blocks",
        ),
        asyncio.create_task(
            _subscription_worker(
                websocket_url=f"{args.ws_base}/transactions",
                stats=subscription_stats["transactions"],
                stop_event=stop_event,
                client_name="transactions",
            ),
            name="subscriber-transactions",
        ),
    ]

    all_tasks = publisher_tasks + subscriber_tasks

    try:
        await asyncio.wait_for(stop_event.wait(), timeout=args.duration)
    except asyncio.TimeoutError:
        pass
    finally:
        stop_event.set()
        await asyncio.gather(*all_tasks, return_exceptions=True)

    _print_summary(publish_stats, subscription_stats)


def _print_summary(publish_stats: List[PublishStats], subscription_stats: Dict[str, SubscriptionStats]) -> None:
    total_sent = sum(s.sent for s in publish_stats)
    total_failed = sum(s.failed for s in publish_stats)
    all_latencies = [lat for s in publish_stats for lat in s.latencies]

    summary = {
        "publish": {
            "total_sent": total_sent,
            "total_failed": total_failed,
            "average_latency_ms": (sum(all_latencies) / len(all_latencies) * 1000.0) if all_latencies else None,
            "p95_latency_ms": _p95(all_latencies),
        },
        "subscriptions": {
            name: {
                "messages": stats.messages,
                "disconnects": stats.disconnects,
            }
            for name, stats in subscription_stats.items()
        },
    }
    print(json.dumps(summary, indent=2))


def _p95(latencies: List[float]) -> Optional[float]:
    if not latencies:
        return None
    sorted_latencies = sorted(latencies)
    index = int(len(sorted_latencies) * 0.95)
    index = min(index, len(sorted_latencies) - 1)
    return sorted_latencies[index] * 1000.0


def _parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="AITBC blockchain-node WebSocket load harness")
    parser.add_argument("--http-base", default="http://127.0.0.1:8080", help="Base URL for REST API")
    parser.add_argument("--ws-base", default="ws://127.0.0.1:8080/rpc/ws", help="Base URL for WebSocket API")
    parser.add_argument("--duration", type=float, default=30.0, help="Duration in seconds")
    parser.add_argument("--publishers", type=int, default=4, help="Concurrent transaction publishers")
    parser.add_argument("--publish-rate", type=float, default=5.0, help="Transactions per second per publisher")
    parser.add_argument("--http-timeout", type=float, default=5.0, help="HTTP client timeout in seconds")
    return parser.parse_args()


def main() -> None:
    args = _parse_args()
    try:
        asyncio.run(run_load(args))
    except KeyboardInterrupt:
        pass


if __name__ == "__main__":
    main()
@@ -1,24 +1,35 @@
from __future__ import annotations

from contextlib import asynccontextmanager

from fastapi import APIRouter, FastAPI
from fastapi.responses import PlainTextResponse

from .config import settings
from .database import init_db
from .gossip import create_backend, gossip_broker
from .metrics import metrics_registry
from .rpc.router import router as rpc_router
from .rpc.websocket import router as websocket_router


@asynccontextmanager
async def lifespan(app: FastAPI):
    init_db()
    backend = create_backend(
        settings.gossip_backend,
        broadcast_url=settings.gossip_broadcast_url,
    )
    await gossip_broker.set_backend(backend)
    try:
        yield
    finally:
        await gossip_broker.shutdown()


def create_app() -> FastAPI:
    app = FastAPI(title="AITBC Blockchain Node", version="0.1.0", lifespan=lifespan)
    app.include_router(rpc_router, prefix="/rpc", tags=["rpc"])
    app.include_router(websocket_router, prefix="/rpc")
    metrics_router = APIRouter()

    @metrics_router.get("/metrics", response_class=PlainTextResponse, tags=["metrics"], summary="Prometheus metrics")
@@ -26,5 +26,8 @@ class ChainSettings(BaseSettings):

    block_time_seconds: int = 2

    gossip_backend: str = "memory"
    gossip_broadcast_url: Optional[str] = None


settings = ChainSettings()
@@ -11,6 +11,7 @@ from sqlmodel import Session, select
from ..logging import get_logger
from ..metrics import metrics_registry
from ..models import Block
from ..gossip import gossip_broker


@dataclass
@@ -78,9 +79,11 @@ class PoAProposer:
        head = session.exec(select(Block).order_by(Block.height.desc()).limit(1)).first()
        next_height = 0
        parent_hash = "0x00"
        interval_seconds: Optional[float] = None
        if head is not None:
            next_height = head.height + 1
            parent_hash = head.hash
            interval_seconds = (datetime.utcnow() - head.timestamp).total_seconds()

        timestamp = datetime.utcnow()
        block_hash = self._compute_block_hash(next_height, parent_hash, timestamp)
@@ -99,6 +102,21 @@ class PoAProposer:

        metrics_registry.increment("blocks_proposed_total")
        metrics_registry.set_gauge("chain_head_height", float(next_height))
        if interval_seconds is not None and interval_seconds >= 0:
            metrics_registry.observe("block_interval_seconds", interval_seconds)

        asyncio.create_task(
            gossip_broker.publish(
                "blocks",
                {
                    "height": block.height,
                    "hash": block.hash,
                    "parent_hash": block.parent_hash,
                    "timestamp": block.timestamp.isoformat(),
                    "tx_count": block.tx_count,
                },
            )
        )

        self._logger.info(
            "Proposed block",
@@ -129,6 +147,19 @@ class PoAProposer:
            )
            session.add(genesis)
            session.commit()
            asyncio.create_task(
                gossip_broker.publish(
                    "blocks",
                    {
                        "height": genesis.height,
                        "hash": genesis.hash,
                        "parent_hash": genesis.parent_hash,
                        "timestamp": genesis.timestamp.isoformat(),
                        "tx_count": genesis.tx_count,
                    },
                )
            )

        self._logger.info("Created genesis block", extra={"hash": genesis_hash})

    def _fetch_chain_head(self) -> Optional[Block]:
apps/blockchain-node/src/aitbc_chain/gossip/__init__.py (new file, 17 lines)
@@ -0,0 +1,17 @@
from .broker import (
    BroadcastGossipBackend,
    GossipBroker,
    InMemoryGossipBackend,
    TopicSubscription,
    create_backend,
    gossip_broker,
)

__all__ = [
    "BroadcastGossipBackend",
    "GossipBroker",
    "InMemoryGossipBackend",
    "TopicSubscription",
    "create_backend",
    "gossip_broker",
]
apps/blockchain-node/src/aitbc_chain/gossip/broker.py (new file, 254 lines)
@@ -0,0 +1,254 @@
from __future__ import annotations

import asyncio
import json
from collections import defaultdict
from contextlib import suppress
from dataclasses import dataclass
from typing import Any, Callable, Dict, List, Optional, Set

try:
    from starlette.broadcast import Broadcast
except ImportError:  # pragma: no cover - Starlette is an indirect dependency of FastAPI
    Broadcast = None  # type: ignore[assignment]

from ..metrics import metrics_registry


def _increment_publication(metric_prefix: str, topic: str) -> None:
    metrics_registry.increment(f"{metric_prefix}_total")
    metrics_registry.increment(f"{metric_prefix}_topic_{topic}")


def _set_queue_gauge(topic: str, size: int) -> None:
    metrics_registry.set_gauge(f"gossip_queue_size_{topic}", float(size))


def _update_subscriber_metrics(topics: Dict[str, List["asyncio.Queue[Any]"]]) -> None:
    for topic, queues in topics.items():
        metrics_registry.set_gauge(f"gossip_subscribers_topic_{topic}", float(len(queues)))
    total = sum(len(queues) for queues in topics.values())
    metrics_registry.set_gauge("gossip_subscribers_total", float(total))


def _clear_topic_metrics(topic: str) -> None:
    metrics_registry.set_gauge(f"gossip_subscribers_topic_{topic}", 0.0)
    _set_queue_gauge(topic, 0)


@dataclass
class TopicSubscription:
    topic: str
    queue: "asyncio.Queue[Any]"
    _unsubscribe: Callable[[], None]

    def close(self) -> None:
        self._unsubscribe()

    async def get(self) -> Any:
        return await self.queue.get()

    async def __aiter__(self):  # type: ignore[override]
        try:
            while True:
                yield await self.queue.get()
        finally:
            self.close()


class GossipBackend:
    async def start(self) -> None:  # pragma: no cover - overridden as needed
        return None

    async def publish(self, topic: str, message: Any) -> None:
        raise NotImplementedError

    async def subscribe(self, topic: str, max_queue_size: int = 100) -> TopicSubscription:
        raise NotImplementedError

    async def shutdown(self) -> None:
        return None


class InMemoryGossipBackend(GossipBackend):
    def __init__(self) -> None:
        self._topics: Dict[str, List["asyncio.Queue[Any]"]] = defaultdict(list)
        self._lock = asyncio.Lock()

    async def publish(self, topic: str, message: Any) -> None:
        async with self._lock:
            queues = list(self._topics.get(topic, []))
        for queue in queues:
            await queue.put(message)
            _set_queue_gauge(topic, queue.qsize())
        _increment_publication("gossip_publications", topic)

    async def subscribe(self, topic: str, max_queue_size: int = 100) -> TopicSubscription:
        queue: "asyncio.Queue[Any]" = asyncio.Queue(maxsize=max_queue_size)

        async with self._lock:
            self._topics[topic].append(queue)
            _update_subscriber_metrics(self._topics)

        _set_queue_gauge(topic, queue.qsize())

        def _unsubscribe() -> None:
            async def _remove() -> None:
                async with self._lock:
                    queues = self._topics.get(topic)
                    if queues is None:
                        return
                    if queue in queues:
                        queues.remove(queue)
                    if not queues:
                        self._topics.pop(topic, None)
                        _clear_topic_metrics(topic)
                    _update_subscriber_metrics(self._topics)

            asyncio.create_task(_remove())

        return TopicSubscription(topic=topic, queue=queue, _unsubscribe=_unsubscribe)

    async def shutdown(self) -> None:
        async with self._lock:
            topics = list(self._topics.keys())
            self._topics.clear()
        for topic in topics:
            _clear_topic_metrics(topic)
        _update_subscriber_metrics(self._topics)


class BroadcastGossipBackend(GossipBackend):
    def __init__(self, url: str) -> None:
        if Broadcast is None:  # pragma: no cover - dependency is optional
            raise RuntimeError("Starlette Broadcast backend requested but starlette is not available")
        self._broadcast = Broadcast(url)  # type: ignore[arg-type]
        self._tasks: Set[asyncio.Task[None]] = set()
        self._lock = asyncio.Lock()
        self._running = False

    async def start(self) -> None:
        if not self._running:
            await self._broadcast.connect()  # type: ignore[union-attr]
            self._running = True

    async def publish(self, topic: str, message: Any) -> None:
        if not self._running:
            raise RuntimeError("Broadcast backend not started")
        payload = _encode_message(message)
        await self._broadcast.publish(topic, payload)  # type: ignore[union-attr]
        _increment_publication("gossip_broadcast_publications", topic)

    async def subscribe(self, topic: str, max_queue_size: int = 100) -> TopicSubscription:
        if not self._running:
            raise RuntimeError("Broadcast backend not started")

        queue: "asyncio.Queue[Any]" = asyncio.Queue(maxsize=max_queue_size)
        stop_event = asyncio.Event()

        async def _run_subscription() -> None:
            async with self._broadcast.subscribe(topic) as subscriber:  # type: ignore[attr-defined,union-attr]
                async for event in subscriber:  # type: ignore[union-attr]
                    if stop_event.is_set():
                        break
                    data = _decode_message(getattr(event, "message", event))
                    try:
                        await queue.put(data)
                        _set_queue_gauge(topic, queue.qsize())
                    except asyncio.CancelledError:
                        break

        task = asyncio.create_task(_run_subscription(), name=f"broadcast-sub:{topic}")
        async with self._lock:
            self._tasks.add(task)
            metrics_registry.set_gauge("gossip_broadcast_subscribers_total", float(len(self._tasks)))

        def _unsubscribe() -> None:
            async def _stop() -> None:
                stop_event.set()
                task.cancel()
                with suppress(asyncio.CancelledError):
                    await task
                async with self._lock:
                    self._tasks.discard(task)
                    metrics_registry.set_gauge("gossip_broadcast_subscribers_total", float(len(self._tasks)))

            asyncio.create_task(_stop())

        return TopicSubscription(topic=topic, queue=queue, _unsubscribe=_unsubscribe)

    async def shutdown(self) -> None:
        async with self._lock:
            tasks = list(self._tasks)
            self._tasks.clear()
            metrics_registry.set_gauge("gossip_broadcast_subscribers_total", 0.0)
        for task in tasks:
            task.cancel()
            with suppress(asyncio.CancelledError):
                await task
        if self._running:
            await self._broadcast.disconnect()  # type: ignore[union-attr]
            self._running = False


class GossipBroker:
    def __init__(self, backend: GossipBackend) -> None:
        self._backend = backend
        self._lock = asyncio.Lock()
        self._started = False

    async def publish(self, topic: str, message: Any) -> None:
        if not self._started:
            await self._backend.start()
            self._started = True
        await self._backend.publish(topic, message)

    async def subscribe(self, topic: str, max_queue_size: int = 100) -> TopicSubscription:
        if not self._started:
            await self._backend.start()
            self._started = True
        return await self._backend.subscribe(topic, max_queue_size=max_queue_size)

    async def set_backend(self, backend: GossipBackend) -> None:
        await backend.start()
        async with self._lock:
            previous = self._backend
            self._backend = backend
            self._started = True
        await previous.shutdown()

    async def shutdown(self) -> None:
        await self._backend.shutdown()
        self._started = False
        metrics_registry.set_gauge("gossip_subscribers_total", 0.0)


def create_backend(backend_type: str, *, broadcast_url: Optional[str] = None) -> GossipBackend:
    backend = backend_type.lower()
    if backend in {"memory", "inmemory", "local"}:
        return InMemoryGossipBackend()
    if backend in {"broadcast", "starlette", "redis"}:
        if not broadcast_url:
            raise ValueError("Broadcast backend requires a gossip_broadcast_url setting")
        return BroadcastGossipBackend(broadcast_url)
    raise ValueError(f"Unsupported gossip backend '{backend_type}'")


def _encode_message(message: Any) -> Any:
    if isinstance(message, (str, bytes, bytearray)):
        return message
    return json.dumps(message, separators=(",", ":"))


def _decode_message(message: Any) -> Any:
    if isinstance(message, (bytes, bytearray)):
        message = message.decode("utf-8")
    if isinstance(message, str):
        try:
            return json.loads(message)
        except json.JSONDecodeError:
            return message
    return message


gossip_broker = GossipBroker(InMemoryGossipBackend())
@@ -15,6 +15,7 @@ class MetricsRegistry:
    def __init__(self) -> None:
        self._counters: Dict[str, float] = {}
        self._gauges: Dict[str, float] = {}
        self._summaries: Dict[str, tuple[float, float]] = {}
        self._lock = Lock()

    def increment(self, name: str, amount: float = 1.0) -> None:
@@ -25,6 +26,17 @@ class MetricsRegistry:
        with self._lock:
            self._gauges[name] = value

    def observe(self, name: str, value: float) -> None:
        with self._lock:
            count, total = self._summaries.get(name, (0.0, 0.0))
            self._summaries[name] = (count + 1.0, total + value)

    def reset(self) -> None:
        with self._lock:
            self._counters.clear()
            self._gauges.clear()
            self._summaries.clear()

    def render_prometheus(self) -> str:
        with self._lock:
            lines: list[str] = []
@@ -34,6 +46,10 @@ class MetricsRegistry:
            for name, value in sorted(self._gauges.items()):
                lines.append(f"# TYPE {name} gauge")
                lines.append(f"{name} {value}")
            for name, (count, total) in sorted(self._summaries.items()):
                lines.append(f"# TYPE {name} summary")
                lines.append(f"{name}_count {count}")
                lines.append(f"{name}_sum {total}")
            return "\n".join(lines) + "\n"
@@ -1,6 +1,8 @@
from __future__ import annotations

import asyncio
import json
import time
from typing import Any, Dict, Optional

from fastapi import APIRouter, HTTPException, status
@@ -8,6 +10,7 @@ from pydantic import BaseModel, Field, model_validator
from sqlmodel import select

from ..database import session_scope
from ..gossip import gossip_broker
from ..mempool import get_mempool
from ..metrics import metrics_registry
from ..models import Account, Block, Receipt, Transaction
@@ -64,84 +67,134 @@ class MintFaucetRequest(BaseModel):

@router.get("/head", summary="Get current chain head")
async def get_head() -> Dict[str, Any]:
    metrics_registry.increment("rpc_get_head_total")
    start = time.perf_counter()
    with session_scope() as session:
        result = session.exec(select(Block).order_by(Block.height.desc()).limit(1)).first()
        if result is None:
            metrics_registry.increment("rpc_get_head_not_found_total")
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="no blocks yet")
        metrics_registry.increment("rpc_get_head_success_total")
        metrics_registry.observe("rpc_get_head_duration_seconds", time.perf_counter() - start)
        return {
            "height": result.height,
            "hash": result.hash,
            "timestamp": result.timestamp.isoformat(),
            "tx_count": result.tx_count,
        }


@router.get("/blocks/{height}", summary="Get block by height")
async def get_block(height: int) -> Dict[str, Any]:
    metrics_registry.increment("rpc_get_block_total")
    start = time.perf_counter()
    with session_scope() as session:
        block = session.exec(select(Block).where(Block.height == height)).first()
        if block is None:
            metrics_registry.increment("rpc_get_block_not_found_total")
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="block not found")
        metrics_registry.increment("rpc_get_block_success_total")
        metrics_registry.observe("rpc_get_block_duration_seconds", time.perf_counter() - start)
        return {
            "height": block.height,
            "hash": block.hash,
            "parent_hash": block.parent_hash,
            "timestamp": block.timestamp.isoformat(),
            "tx_count": block.tx_count,
            "state_root": block.state_root,
        }


@router.get("/tx/{tx_hash}", summary="Get transaction by hash")
async def get_transaction(tx_hash: str) -> Dict[str, Any]:
    metrics_registry.increment("rpc_get_transaction_total")
    start = time.perf_counter()
    with session_scope() as session:
        tx = session.exec(select(Transaction).where(Transaction.tx_hash == tx_hash)).first()
        if tx is None:
            metrics_registry.increment("rpc_get_transaction_not_found_total")
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="transaction not found")
        metrics_registry.increment("rpc_get_transaction_success_total")
        metrics_registry.observe("rpc_get_transaction_duration_seconds", time.perf_counter() - start)
        return {
            "tx_hash": tx.tx_hash,
            "block_height": tx.block_height,
            "sender": tx.sender,
            "recipient": tx.recipient,
            "payload": tx.payload,
            "created_at": tx.created_at.isoformat(),
        }


@router.get("/receipts/{receipt_id}", summary="Get receipt by ID")
async def get_receipt(receipt_id: str) -> Dict[str, Any]:
    metrics_registry.increment("rpc_get_receipt_total")
    start = time.perf_counter()
    with session_scope() as session:
        receipt = session.exec(select(Receipt).where(Receipt.receipt_id == receipt_id)).first()
        if receipt is None:
            metrics_registry.increment("rpc_get_receipt_not_found_total")
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="receipt not found")
        metrics_registry.increment("rpc_get_receipt_success_total")
        metrics_registry.observe("rpc_get_receipt_duration_seconds", time.perf_counter() - start)
        return _serialize_receipt(receipt)


@router.get("/getBalance/{address}", summary="Get account balance")
async def get_balance(address: str) -> Dict[str, Any]:
    metrics_registry.increment("rpc_get_balance_total")
    start = time.perf_counter()
    with session_scope() as session:
        account = session.get(Account, address)
        if account is None:
            metrics_registry.increment("rpc_get_balance_empty_total")
            metrics_registry.observe("rpc_get_balance_duration_seconds", time.perf_counter() - start)
            return {"address": address, "balance": 0, "nonce": 0}
        metrics_registry.increment("rpc_get_balance_success_total")
        metrics_registry.observe("rpc_get_balance_duration_seconds", time.perf_counter() - start)
        return {
            "address": account.address,
            "balance": account.balance,
            "nonce": account.nonce,
            "updated_at": account.updated_at.isoformat(),
        }


@router.post("/sendTx", summary="Submit a new transaction")
async def send_transaction(request: TransactionRequest) -> Dict[str, Any]:
    metrics_registry.increment("rpc_send_tx_total")
    start = time.perf_counter()
    mempool = get_mempool()
    tx_dict = request.model_dump()
    tx_hash = mempool.add(tx_dict)
    try:
        asyncio.create_task(
            gossip_broker.publish(
                "transactions",
                {
                    "tx_hash": tx_hash,
                    "sender": request.sender,
                    "recipient": request.recipient,
                    "payload": request.payload,
                    "nonce": request.nonce,
                    "fee": request.fee,
                    "type": request.type,
                },
            )
        )
        metrics_registry.increment("rpc_send_tx_success_total")
        return {"tx_hash": tx_hash}
    except Exception:
        metrics_registry.increment("rpc_send_tx_failed_total")
        raise
    finally:
        metrics_registry.observe("rpc_send_tx_duration_seconds", time.perf_counter() - start)


@router.post("/submitReceipt", summary="Submit receipt claim transaction")
async def submit_receipt(request: ReceiptSubmissionRequest) -> Dict[str, Any]:
    metrics_registry.increment("rpc_submit_receipt_total")
    start = time.perf_counter()
    tx_payload = {
        "type": "RECEIPT_CLAIM",
        "sender": request.sender,
@@ -151,17 +204,31 @@ async def submit_receipt(request: ReceiptSubmissionRequest) -> Dict[str, Any]:
        "sig": request.sig,
    }
    tx_request = TransactionRequest.model_validate(tx_payload)
    try:
        response = await send_transaction(tx_request)
        metrics_registry.increment("rpc_submit_receipt_success_total")
        return response
    except HTTPException:
        metrics_registry.increment("rpc_submit_receipt_failed_total")
        raise
    except Exception:
        metrics_registry.increment("rpc_submit_receipt_failed_total")
        raise
    finally:
        metrics_registry.observe("rpc_submit_receipt_duration_seconds", time.perf_counter() - start)


@router.post("/estimateFee", summary="Estimate transaction fee")
async def estimate_fee(request: EstimateFeeRequest) -> Dict[str, Any]:
    metrics_registry.increment("rpc_estimate_fee_total")
    start = time.perf_counter()
    base_fee = 10
    per_byte = 1
    payload_bytes = len(json.dumps(request.payload, sort_keys=True, separators=(",", ":")).encode())
    estimated_fee = base_fee + per_byte * payload_bytes
    tx_type = (request.type or "TRANSFER").upper()
    metrics_registry.increment("rpc_estimate_fee_success_total")
    metrics_registry.observe("rpc_estimate_fee_duration_seconds", time.perf_counter() - start)
    return {
        "type": tx_type,
        "base_fee": base_fee,
@@ -172,6 +239,8 @@ async def estimate_fee(request: EstimateFeeRequest) -> Dict[str, Any]:

@router.post("/admin/mintFaucet", summary="Mint devnet funds to an address")
async def mint_faucet(request: MintFaucetRequest) -> Dict[str, Any]:
    metrics_registry.increment("rpc_mint_faucet_total")
    start = time.perf_counter()
    with session_scope() as session:
        account = session.get(Account, request.address)
        if account is None:
@@ -181,4 +250,6 @@ async def mint_faucet(request: MintFaucetRequest) -> Dict[str, Any]:
        account.balance += request.amount
        session.commit()
        updated_balance = account.balance
    metrics_registry.increment("rpc_mint_faucet_success_total")
    metrics_registry.observe("rpc_mint_faucet_duration_seconds", time.perf_counter() - start)
    return {"address": request.address, "balance": updated_balance}
apps/blockchain-node/src/aitbc_chain/rpc/websocket.py (new file, 34 lines)
@@ -0,0 +1,34 @@
from __future__ import annotations

import asyncio
from typing import AsyncIterator, Dict

from fastapi import APIRouter, WebSocket, WebSocketDisconnect

from ..gossip import gossip_broker

router = APIRouter(prefix="/ws", tags=["ws"])


async def _stream_topic(topic: str, websocket: WebSocket) -> None:
    subscription = await gossip_broker.subscribe(topic)
    try:
        while True:
            message = await subscription.get()
            await websocket.send_json(message)
    except WebSocketDisconnect:
        pass
    finally:
        subscription.close()


@router.websocket("/blocks")
async def blocks_stream(websocket: WebSocket) -> None:
    await websocket.accept()
    await _stream_topic("blocks", websocket)


@router.websocket("/transactions")
async def transactions_stream(websocket: WebSocket) -> None:
    await websocket.accept()
    await _stream_topic("transactions", websocket)
apps/blockchain-node/tests/conftest.py (new file, 23 lines)
@@ -0,0 +1,23 @@
from __future__ import annotations

import pytest
from sqlmodel import SQLModel, Session, create_engine

from aitbc_chain.models import Block, Transaction, Receipt  # noqa: F401 - ensure models imported for metadata


@pytest.fixture(name="engine")
def engine_fixture():
    engine = create_engine("sqlite:///:memory:", connect_args={"check_same_thread": False})
    SQLModel.metadata.create_all(engine)
    try:
        yield engine
    finally:
        SQLModel.metadata.drop_all(engine)


@pytest.fixture(name="session")
def session_fixture(engine):
    with Session(engine) as session:
        yield session
        session.rollback()
apps/blockchain-node/tests/test_gossip_broadcast.py (new file, 76 lines)
@@ -0,0 +1,76 @@
from __future__ import annotations

import asyncio

import pytest
from fastapi.testclient import TestClient

from aitbc_chain.app import create_app
from aitbc_chain.gossip import BroadcastGossipBackend, InMemoryGossipBackend, gossip_broker


@pytest.fixture(autouse=True)
async def reset_broker_backend():
    previous_backend = InMemoryGossipBackend()
    await gossip_broker.set_backend(previous_backend)
    yield
    await gossip_broker.set_backend(InMemoryGossipBackend())


def _run_in_thread(fn):
    loop = asyncio.get_event_loop()
    return loop.run_in_executor(None, fn)


@pytest.mark.asyncio
async def test_websocket_fanout_with_broadcast_backend():
    backend = BroadcastGossipBackend("memory://")
    await gossip_broker.set_backend(backend)

    app = create_app()

    loop = asyncio.get_running_loop()

    def _sync_test() -> None:
        with TestClient(app) as client:
            with client.websocket_connect("/rpc/ws/transactions") as ws_a, client.websocket_connect(
                "/rpc/ws/transactions"
            ) as ws_b:
                payload = {
                    "tx_hash": "0x01",
                    "sender": "alice",
                    "recipient": "bob",
                    "payload": {"amount": 1},
                    "nonce": 0,
                    "fee": 0,
                    "type": "TRANSFER",
                }
                fut = asyncio.run_coroutine_threadsafe(gossip_broker.publish("transactions", payload), loop)
                fut.result(timeout=5.0)
                assert ws_a.receive_json() == payload
                assert ws_b.receive_json() == payload

    await _run_in_thread(_sync_test)


@pytest.mark.asyncio
async def test_broadcast_backend_decodes_cursorless_payload():
    backend = BroadcastGossipBackend("memory://")
    await gossip_broker.set_backend(backend)

    app = create_app()

    loop = asyncio.get_running_loop()

    def _sync_test() -> None:
        with TestClient(app) as client:
            with client.websocket_connect("/rpc/ws/blocks") as ws:
                payload = [
                    {"height": 1, "hash": "0xabc"},
                    {"height": 2, "hash": "0xdef"},
                ]
                fut = asyncio.run_coroutine_threadsafe(gossip_broker.publish("blocks", payload), loop)
                fut.result(timeout=5.0)
                assert ws.receive_json() == payload

    await _run_in_thread(_sync_test)
apps/blockchain-node/tests/test_models.py (new file, 92 lines)
@@ -0,0 +1,92 @@
from __future__ import annotations

import pytest
from sqlmodel import Session

from aitbc_chain.models import Block, Transaction, Receipt


def _insert_block(session: Session, height: int = 0) -> Block:
    block = Block(
        height=height,
        hash=f"0x{'0'*63}{height}",
        parent_hash="0x" + "0" * 64,
        proposer="validator",
        tx_count=0,
    )
    session.add(block)
    session.commit()
    session.refresh(block)
    return block


def test_relationships(session: Session) -> None:
    block = _insert_block(session, height=1)

    tx = Transaction(
        tx_hash="0x" + "1" * 64,
        block_height=block.height,
        sender="alice",
        recipient="bob",
        payload={"foo": "bar"},
    )
    receipt = Receipt(
        job_id="job-1",
        receipt_id="0x" + "2" * 64,
        block_height=block.height,
        payload={},
        miner_signature={},
        coordinator_attestations=[],
    )
    session.add(tx)
    session.add(receipt)
    session.commit()
    session.refresh(tx)
    session.refresh(receipt)

    assert tx.block is not None
    assert tx.block.hash == block.hash
    assert receipt.block is not None
    assert receipt.block.hash == block.hash


def test_hash_validation_accepts_hex(session: Session) -> None:
    block = Block(
        height=10,
        hash="0x" + "a" * 64,
        parent_hash="0x" + "b" * 64,
        proposer="validator",
    )
    session.add(block)
    session.commit()
    session.refresh(block)

    assert block.hash.startswith("0x")
    assert block.parent_hash.startswith("0x")


def test_hash_validation_rejects_non_hex(session: Session) -> None:
    with pytest.raises(ValueError):
        Block(
            height=20,
            hash="not-hex",
            parent_hash="0x" + "c" * 64,
            proposer="validator",
        )

    with pytest.raises(ValueError):
        Transaction(
            tx_hash="bad",
            sender="alice",
            recipient="bob",
            payload={},
        )

    with pytest.raises(ValueError):
        Receipt(
            job_id="job",
            receipt_id="oops",
            payload={},
            miner_signature={},
            coordinator_attestations=[],
        )
apps/blockchain-node/tests/test_websocket.py (new file, 46 lines)
@@ -0,0 +1,46 @@
from __future__ import annotations

import asyncio

from fastapi.testclient import TestClient

from aitbc_chain.app import create_app
from aitbc_chain.gossip import gossip_broker


def _publish(topic: str, message: dict) -> None:
    asyncio.run(gossip_broker.publish(topic, message))


def test_blocks_websocket_stream() -> None:
    client = TestClient(create_app())

    with client.websocket_connect("/rpc/ws/blocks") as websocket:
        payload = {
            "height": 1,
            "hash": "0x" + "1" * 64,
            "parent_hash": "0x" + "0" * 64,
            "timestamp": "2025-01-01T00:00:00Z",
            "tx_count": 2,
        }
        _publish("blocks", payload)
        message = websocket.receive_json()
        assert message == payload


def test_transactions_websocket_stream() -> None:
    client = TestClient(create_app())

    with client.websocket_connect("/rpc/ws/transactions") as websocket:
        payload = {
            "tx_hash": "0x" + "a" * 64,
            "sender": "alice",
            "recipient": "bob",
            "payload": {"amount": 1},
            "nonce": 1,
            "fee": 0,
            "type": "TRANSFER",
        }
        _publish("transactions", payload)
        message = websocket.receive_json()
        assert message == payload