feat: add foreign key constraints and metrics for blockchain node
.gitignore (vendored, 11 changes)
@@ -7,9 +7,16 @@ __pycache__/
 *.pyd
 *.so
 .venv/
+*/.venv/
 venv/
-.env
-.env.*
+*.env
+*.env.*
 
+# Databases & Alembic artifacts
+*.db
+*/data/*.db
+alembic.ini
+migrations/versions/__pycache__/
+
 # Node / JS
 node_modules/
.windsurf/workflows/fruits.md (new file, 6 lines)
---
description: lowest-effort
auto_execution_mode: 3
---

Identify the lowest-effort task in docs/roadmap.md and code it now
Binary file not shown.
@@ -12,23 +12,85 @@ import sqlalchemy as sa
 # revision identifiers, used by Alembic.
-revision: str = '80bc0020bde2'
-down_revision: Union[str, Sequence[str], None] = 'e31f486f1484'
+revision: str = "80bc0020bde2"
+down_revision: Union[str, Sequence[str], None] = "e31f486f1484"
 branch_labels: Union[str, Sequence[str], None] = None
 depends_on: Union[str, Sequence[str], None] = None
 
 
 def upgrade() -> None:
     """Upgrade schema."""
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.create_foreign_key(None, 'receipt', 'block', ['block_height'], ['height'])
-    op.create_foreign_key(None, 'transaction', 'block', ['block_height'], ['height'])
-    # ### end Alembic commands ###
+    # Recreate transaction table with foreign key to block.height
+    op.drop_table("transaction")
+    op.create_table(
+        "transaction",
+        sa.Column("id", sa.Integer(), primary_key=True, nullable=False),
+        sa.Column("tx_hash", sa.String(), nullable=False),
+        sa.Column("block_height", sa.Integer(), sa.ForeignKey("block.height"), nullable=True),
+        sa.Column("sender", sa.String(), nullable=False),
+        sa.Column("recipient", sa.String(), nullable=False),
+        sa.Column("payload", sa.JSON(), nullable=False),
+        sa.Column("created_at", sa.DateTime(), nullable=False),
+    )
+    op.create_index("ix_transaction_tx_hash", "transaction", ["tx_hash"], unique=True)
+    op.create_index("ix_transaction_block_height", "transaction", ["block_height"], unique=False)
+    op.create_index("ix_transaction_created_at", "transaction", ["created_at"], unique=False)
+
+    # Recreate receipt table with foreign key to block.height
+    op.drop_table("receipt")
+    op.create_table(
+        "receipt",
+        sa.Column("id", sa.Integer(), primary_key=True, nullable=False),
+        sa.Column("job_id", sa.String(), nullable=False),
+        sa.Column("receipt_id", sa.String(), nullable=False),
+        sa.Column("block_height", sa.Integer(), sa.ForeignKey("block.height"), nullable=True),
+        sa.Column("payload", sa.JSON(), nullable=False),
+        sa.Column("miner_signature", sa.JSON(), nullable=False),
+        sa.Column("coordinator_attestations", sa.JSON(), nullable=False),
+        sa.Column("minted_amount", sa.Integer(), nullable=True),
+        sa.Column("recorded_at", sa.DateTime(), nullable=False),
+    )
+    op.create_index("ix_receipt_job_id", "receipt", ["job_id"], unique=False)
+    op.create_index("ix_receipt_receipt_id", "receipt", ["receipt_id"], unique=True)
+    op.create_index("ix_receipt_block_height", "receipt", ["block_height"], unique=False)
+    op.create_index("ix_receipt_recorded_at", "receipt", ["recorded_at"], unique=False)
 
 
 def downgrade() -> None:
     """Downgrade schema."""
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_constraint(None, 'transaction', type_='foreignkey')
-    op.drop_constraint(None, 'receipt', type_='foreignkey')
-    # ### end Alembic commands ###
+    # Revert receipt table without foreign key
+    op.drop_table("receipt")
+    op.create_table(
+        "receipt",
+        sa.Column("id", sa.Integer(), primary_key=True, nullable=False),
+        sa.Column("job_id", sa.String(), nullable=False),
+        sa.Column("receipt_id", sa.String(), nullable=False),
+        sa.Column("block_height", sa.Integer(), nullable=True),
+        sa.Column("payload", sa.JSON(), nullable=False),
+        sa.Column("miner_signature", sa.JSON(), nullable=False),
+        sa.Column("coordinator_attestations", sa.JSON(), nullable=False),
+        sa.Column("minted_amount", sa.Integer(), nullable=True),
+        sa.Column("recorded_at", sa.DateTime(), nullable=False),
+    )
+    op.create_index("ix_receipt_job_id", "receipt", ["job_id"], unique=False)
+    op.create_index("ix_receipt_receipt_id", "receipt", ["receipt_id"], unique=True)
+    op.create_index("ix_receipt_block_height", "receipt", ["block_height"], unique=False)
+    op.create_index("ix_receipt_recorded_at", "receipt", ["recorded_at"], unique=False)
+
+    # Revert transaction table without foreign key
+    op.drop_table("transaction")
+    op.create_table(
+        "transaction",
+        sa.Column("id", sa.Integer(), primary_key=True, nullable=False),
+        sa.Column("tx_hash", sa.String(), nullable=False),
+        sa.Column("block_height", sa.Integer(), nullable=True),
+        sa.Column("sender", sa.String(), nullable=False),
+        sa.Column("recipient", sa.String(), nullable=False),
+        sa.Column("payload", sa.JSON(), nullable=False),
+        sa.Column("created_at", sa.DateTime(), nullable=False),
+    )
+    op.create_index("ix_transaction_tx_hash", "transaction", ["tx_hash"], unique=True)
+    op.create_index("ix_transaction_block_height", "transaction", ["block_height"], unique=False)
+    op.create_index("ix_transaction_created_at", "transaction", ["created_at"], unique=False)
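The migration recreates both tables instead of issuing `ALTER TABLE ... ADD CONSTRAINT`, which SQLite does not support; note that `op.drop_table` discards any existing rows, presumably acceptable on a devnet database. As a quick check after upgrading, a minimal sketch (the database path `data/chain.db` is an assumption, adjust to the devnet's actual file):

```python
# Sketch: verify the foreign keys created by migration 80bc0020bde2.
# "data/chain.db" is a hypothetical path for the devnet SQLite file.
import sqlite3

conn = sqlite3.connect("data/chain.db")
for table in ("transaction", "receipt"):
    # PRAGMA foreign_key_list reports one row per FK on the table
    rows = conn.execute(f'PRAGMA foreign_key_list("{table}")').fetchall()
    print(table, rows)  # expect a block_height -> block.height entry for each
conn.close()
```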
apps/blockchain-node/observability/README.md (new file, 43 lines)
# Blockchain Node Observability

This directory contains Prometheus and Grafana assets for the devnet environment. The stack relies on the HTTP `/metrics` endpoints exposed by:

1. The blockchain node API (`http://127.0.0.1:8080/metrics`).
2. The mock coordinator/miner exporter (`http://127.0.0.1:8090/metrics`).

## Files

- `prometheus.yml` – Scrapes both blockchain node and mock coordinator/miner metrics.
- `grafana-dashboard.json` – Panels for block interval, RPC throughput, miner activity, coordinator receipt flow, **plus new gossip queue, subscriber, and publication rate panels**.
- `alerts.yml` – Alertmanager rules highlighting proposer stalls, miner errors, and coordinator receipt drop-offs.
- `gossip-recording-rules.yml` – Prometheus recording rules that derive queue/subscriber gauges and publication rates from gossip metrics.

## Usage

```bash
# Launch Prometheus using the sample config
prometheus --config.file=apps/blockchain-node/observability/prometheus.yml

# Import the dashboard JSON into Grafana
grafana-cli dashboards import apps/blockchain-node/observability/grafana-dashboard.json

# Run Alertmanager with the example rules
alertmanager --config.file=apps/blockchain-node/observability/alerts.yml

# Reload Prometheus and Alertmanager after tuning thresholds
kill -HUP $(pgrep prometheus)
kill -HUP $(pgrep alertmanager)
```

> **Tip:** The devnet helper `scripts/devnet_up.sh` seeds the metrics endpoints. After running it, both scrape targets will begin emitting data in under a minute.

## Gossip Observability

Recent updates instrumented the gossip broker with Prometheus counters and gauges. Key metrics surfaced via the recording rules and dashboard include:

- `gossip_publications_rate_per_sec` and `gossip_broadcast_publications_rate_per_sec` – per-second publication throughput for the in-memory and broadcast backends.
- `gossip_publications_topic_rate_per_sec` – topic-level publication rate time series (Grafana panel "Gossip Publication Rate by Topic").
- `gossip_queue_size_by_topic` – instantaneous queue depth per topic ("Gossip Queue Depth by Topic").
- `gossip_subscribers_by_topic`, `gossip_subscribers_total`, `gossip_broadcast_subscribers_total` – subscriber counts ("Gossip Subscriber Counts").

Use these panels to monitor convergence/back-pressure during load tests (for example with `scripts/ws_load_test.py`) when running against a Redis-backed broadcast backend.
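Before wiring up Prometheus, a minimal sketch for sanity-checking that both scrape targets from the README are up and exporting gossip series (stdlib only, endpoints as listed above):

```python
# Sketch: poll both /metrics endpoints and count gossip series.
import urllib.request

for url in (
    "http://127.0.0.1:8080/metrics",
    "http://127.0.0.1:8090/metrics",
):
    with urllib.request.urlopen(url, timeout=5) as resp:
        body = resp.read().decode("utf-8")
    gossip_lines = [line for line in body.splitlines() if line.startswith("gossip_")]
    print(url, "->", len(gossip_lines), "gossip series")
```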
apps/blockchain-node/observability/alerts.yml (new file, 43 lines)
groups:
  - name: blockchain-node
    rules:
      - alert: BlockProposalStalled
        expr: (block_interval_seconds_sum / block_interval_seconds_count) > 5
        for: 1m
        labels:
          severity: warning
        annotations:
          summary: "Block production interval exceeded 5s"
          description: |
            Average block interval is {{ $value }} seconds, exceeding the expected cadence.

      - alert: BlockProposalDown
        expr: (block_interval_seconds_sum / block_interval_seconds_count) > 10
        for: 2m
        labels:
          severity: critical
        annotations:
          summary: "Block production halted"
          description: |
            Block intervals have spiked above 10 seconds for more than two minutes.
            Check proposer loop and database state.

      - alert: MinerErrorsDetected
        expr: miner_error_rate > 0
        for: 1m
        labels:
          severity: critical
        annotations:
          summary: "Miner mock reporting errors"
          description: |
            The miner mock error gauge is {{ $value }}. Investigate miner telemetry.

      - alert: CoordinatorReceiptDrop
        expr: rate(miner_receipts_attested_total[5m]) == 0
        for: 5m
        labels:
          severity: warning
        annotations:
          summary: "No receipts attested in 5 minutes"
          description: |
            Receipt attestations ceased during the last five minutes. Inspect coordinator connectivity.
apps/blockchain-node/observability/gossip-recording-rules.yml (new file, 36 lines)

groups:
  - name: gossip_metrics
    interval: 15s
    rules:
      - record: gossip_publications_rate_per_sec
        expr: rate(gossip_publications_total[1m])

      - record: gossip_broadcast_publications_rate_per_sec
        expr: rate(gossip_broadcast_publications_total[1m])

      - record: gossip_publications_topic_rate_per_sec
        expr: |
          label_replace(
            rate({__name__=~"gossip_publications_topic_.*"}[1m]),
            "topic",
            "$1",
            "__name__",
            "gossip_publications_topic_(.*)"
          )

      - record: gossip_queue_size_by_topic
        expr: |
          label_replace(
            {__name__=~"gossip_queue_size_.*"},
            "topic",
            "$1",
            "__name__",
            "gossip_queue_size_(.*)"
          )

      - record: gossip_subscribers_by_topic
        expr: |
          label_replace(
            {__name__=~"gossip_subscribers_topic_.*"},
            "topic",
            "$1",
            "__name__",
            "gossip_subscribers_topic_(.*)"
          )
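These rules lift the topic name out of the metric name itself, since the broker encodes topics as metric-name suffixes rather than labels. A small sketch of the same regex transformation outside Prometheus, to make the naming convention concrete:

```python
# Sketch: the regex used by label_replace(..., "topic", "$1", "__name__",
# "gossip_publications_topic_(.*)") applied in plain Python.
import re

samples = ["gossip_publications_topic_blocks", "gossip_publications_topic_transactions"]
pattern = re.compile(r"gossip_publications_topic_(.*)")
for name in samples:
    match = pattern.fullmatch(name)
    if match:
        print(f'{{__name__="{name}"}} -> {{topic="{match.group(1)}"}}')
```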
apps/blockchain-node/observability/grafana-dashboard.json (new file, 377 lines)
{
  "annotations": {
    "list": [
      {
        "builtIn": 1,
        "datasource": {
          "type": "grafana",
          "uid": "grafana"
        },
        "enable": true,
        "hide": true,
        "iconColor": "rgba(0, 211, 255, 1)",
        "name": "Annotations & Alerts",
        "type": "dashboard"
      }
    ]
  },
  "description": "AITBC devnet observability for blockchain node, coordinator, and miner mock.",
  "editable": true,
  "fiscalYearStartMonth": 0,
  "gnetId": null,
  "graphTooltip": 0,
  "id": null,
  "iteration": 1727420700000,
  "links": [],
  "liveNow": false,
  "panels": [
    {
      "datasource": {
        "type": "prometheus",
        "uid": "PROMETHEUS_DS"
      },
      "fieldConfig": {
        "defaults": {
          "custom": {},
          "unit": "s"
        },
        "overrides": []
      },
      "gridPos": {
        "h": 8,
        "w": 12,
        "x": 0,
        "y": 0
      },
      "id": 1,
      "options": {
        "legend": {
          "calcs": [],
          "displayMode": "list",
          "placement": "bottom",
          "showLegend": true
        }
      },
      "targets": [
        {
          "expr": "block_interval_seconds_sum / block_interval_seconds_count",
          "legendFormat": "avg block interval",
          "refId": "A"
        }
      ],
      "title": "Block Interval (seconds)",
      "type": "timeseries"
    },
    {
      "datasource": {
        "type": "prometheus",
        "uid": "PROMETHEUS_DS"
      },
      "fieldConfig": {
        "defaults": {
          "custom": {},
          "unit": "ops"
        },
        "overrides": []
      },
      "gridPos": {
        "h": 8,
        "w": 12,
        "x": 12,
        "y": 0
      },
      "id": 2,
      "options": {
        "legend": {
          "calcs": ["lastNotNull"],
          "displayMode": "table",
          "placement": "bottom",
          "showLegend": true
        }
      },
      "targets": [
        {
          "expr": "rate(rpc_send_tx_total[5m])",
          "legendFormat": "sendTx",
          "refId": "A"
        },
        {
          "expr": "rate(rpc_submit_receipt_total[5m])",
          "legendFormat": "submitReceipt",
          "refId": "B"
        },
        {
          "expr": "rate(rpc_get_head_total[5m])",
          "legendFormat": "getHead",
          "refId": "C"
        }
      ],
      "title": "RPC Throughput",
      "type": "timeseries"
    },
    {
      "datasource": {
        "type": "prometheus",
        "uid": "PROMETHEUS_DS"
      },
      "fieldConfig": {
        "defaults": {
          "custom": {}
        },
        "overrides": []
      },
      "gridPos": {
        "h": 8,
        "w": 12,
        "x": 0,
        "y": 8
      },
      "id": 3,
      "options": {
        "legend": {
          "calcs": ["lastNotNull"],
          "displayMode": "table",
          "placement": "bottom",
          "showLegend": true
        }
      },
      "targets": [
        {
          "expr": "miner_active_jobs",
          "legendFormat": "active jobs",
          "refId": "A"
        },
        {
          "expr": "miner_error_rate",
          "legendFormat": "error gauge",
          "refId": "B"
        }
      ],
      "title": "Miner Activity",
      "type": "timeseries"
    },
    {
      "datasource": {
        "type": "prometheus",
        "uid": "PROMETHEUS_DS"
      },
      "fieldConfig": {
        "defaults": {
          "custom": {},
          "unit": "short"
        },
        "overrides": []
      },
      "gridPos": {
        "h": 8,
        "w": 12,
        "x": 12,
        "y": 8
      },
      "id": 4,
      "options": {
        "legend": {
          "calcs": [],
          "displayMode": "list",
          "placement": "bottom",
          "showLegend": true
        }
      },
      "targets": [
        {
          "expr": "rate(miner_receipts_attested_total[5m])",
          "legendFormat": "receipts attested",
          "refId": "A"
        },
        {
          "expr": "rate(miner_receipts_unknown_total[5m])",
          "legendFormat": "unknown receipts",
          "refId": "B"
        }
      ],
      "title": "Coordinator Receipt Flow",
      "type": "timeseries"
    },
    {
      "datasource": {
        "type": "prometheus",
        "uid": "PROMETHEUS_DS"
      },
      "fieldConfig": {
        "defaults": {
          "custom": {}
        },
        "overrides": []
      },
      "gridPos": {
        "h": 8,
        "w": 12,
        "x": 0,
        "y": 16
      },
      "id": 5,
      "options": {
        "legend": {
          "calcs": ["lastNotNull"],
          "displayMode": "table",
          "placement": "bottom",
          "showLegend": true
        }
      },
      "targets": [
        {
          "expr": "gossip_queue_size_by_topic",
          "legendFormat": "{{topic}}",
          "refId": "A"
        }
      ],
      "title": "Gossip Queue Depth by Topic",
      "type": "timeseries"
    },
    {
      "datasource": {
        "type": "prometheus",
        "uid": "PROMETHEUS_DS"
      },
      "fieldConfig": {
        "defaults": {
          "custom": {}
        },
        "overrides": []
      },
      "gridPos": {
        "h": 8,
        "w": 12,
        "x": 12,
        "y": 16
      },
      "id": 6,
      "options": {
        "legend": {
          "calcs": ["lastNotNull"],
          "displayMode": "table",
          "placement": "bottom",
          "showLegend": true
        }
      },
      "targets": [
        {
          "expr": "gossip_subscribers_by_topic",
          "legendFormat": "{{topic}}",
          "refId": "A"
        },
        {
          "expr": "gossip_subscribers_total",
          "legendFormat": "total subscribers",
          "refId": "B"
        },
        {
          "expr": "gossip_broadcast_subscribers_total",
          "legendFormat": "broadcast subscribers",
          "refId": "C"
        }
      ],
      "title": "Gossip Subscriber Counts",
      "type": "timeseries"
    },
    {
      "datasource": {
        "type": "prometheus",
        "uid": "PROMETHEUS_DS"
      },
      "fieldConfig": {
        "defaults": {
          "custom": {},
          "unit": "ops"
        },
        "overrides": []
      },
      "gridPos": {
        "h": 8,
        "w": 12,
        "x": 0,
        "y": 24
      },
      "id": 7,
      "options": {
        "legend": {
          "calcs": ["lastNotNull"],
          "displayMode": "table",
          "placement": "bottom",
          "showLegend": true
        }
      },
      "targets": [
        {
          "expr": "gossip_publications_rate_per_sec",
          "legendFormat": "memory backend",
          "refId": "A"
        },
        {
          "expr": "gossip_broadcast_publications_rate_per_sec",
          "legendFormat": "broadcast backend",
          "refId": "B"
        }
      ],
      "title": "Gossip Publication Rate (total)",
      "type": "timeseries"
    },
    {
      "datasource": {
        "type": "prometheus",
        "uid": "PROMETHEUS_DS"
      },
      "fieldConfig": {
        "defaults": {
          "custom": {},
          "unit": "ops"
        },
        "overrides": []
      },
      "gridPos": {
        "h": 8,
        "w": 12,
        "x": 12,
        "y": 24
      },
      "id": 8,
      "options": {
        "legend": {
          "calcs": ["lastNotNull"],
          "displayMode": "table",
          "placement": "bottom",
          "showLegend": true
        }
      },
      "targets": [
        {
          "expr": "gossip_publications_topic_rate_per_sec",
          "legendFormat": "{{topic}}",
          "refId": "A"
        }
      ],
      "title": "Gossip Publication Rate by Topic",
      "type": "timeseries"
    }
  ],
  "refresh": "10s",
  "schemaVersion": 39,
  "style": "dark",
  "tags": [
    "aitbc",
    "blockchain-node"
  ],
  "templating": {
    "list": []
  },
  "time": {
    "from": "now-30m",
    "to": "now"
  },
  "timepicker": {},
  "timezone": "",
  "title": "AITBC Blockchain Node",
  "uid": null,
  "version": 1,
  "weekStart": ""
}
apps/blockchain-node/observability/prometheus.yml (new file, 28 lines)
global:
  scrape_interval: 5s
  evaluation_interval: 10s

alerting:
  alertmanagers:
    - static_configs:
        - targets:
            - "127.0.0.1:9093"

scrape_configs:
  - job_name: "blockchain-node"
    static_configs:
      - targets:
          - "127.0.0.1:8080"
        labels:
          service: "blockchain-node"

  - job_name: "mock-coordinator"
    static_configs:
      - targets:
          - "127.0.0.1:8090"
        labels:
          service: "mock-coordinator"

rule_files:
  - alerts.yml
  - gossip-recording-rules.yml
@@ -3,9 +3,14 @@
 
 from __future__ import annotations
 
+import random
+import time
 from typing import Dict
 
 from fastapi import FastAPI
+from fastapi.responses import PlainTextResponse
+
+from aitbc_chain.metrics import metrics_registry
 
 app = FastAPI(title="Mock Coordinator API", version="0.1.0")
 
@@ -15,6 +20,17 @@ MOCK_JOBS: Dict[str, Dict[str, str]] = {
 }
 
 
+def _simulate_miner_metrics() -> None:
+    metrics_registry.set_gauge("miner_active_jobs", float(random.randint(0, 5)))
+    metrics_registry.set_gauge("miner_error_rate", float(random.randint(0, 1)))
+    metrics_registry.observe("miner_job_duration_seconds", random.uniform(1.0, 5.0))
+
+
+@app.on_event("startup")
+async def _startup() -> None:
+    _simulate_miner_metrics()
+
+
 @app.get("/health")
 def health() -> Dict[str, str]:
     return {"status": "ok"}
@@ -24,15 +40,23 @@ def health() -> Dict[str, str]:
 def attest_receipt(payload: Dict[str, str]) -> Dict[str, str | bool]:
     job_id = payload.get("job_id")
     if job_id in MOCK_JOBS:
+        metrics_registry.increment("miner_receipts_attested_total")
         return {
             "exists": True,
             "paid": True,
             "not_double_spent": True,
             "quote": MOCK_JOBS[job_id],
         }
+    metrics_registry.increment("miner_receipts_unknown_total")
     return {
         "exists": False,
         "paid": False,
         "not_double_spent": False,
         "quote": {},
     }
+
+
+@app.get("/metrics", response_class=PlainTextResponse)
+def metrics() -> str:
+    metrics_registry.observe("miner_metrics_scrape_duration_seconds", random.uniform(0.001, 0.01))
+    return metrics_registry.render_prometheus()
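The handler now bumps one of two counters depending on whether the job is known. A minimal sketch exercising both branches by calling the function directly (the route decorator and module path are outside the hunk, so the module name and the MOCK_JOBS key below are hypothetical):

```python
# Sketch: drive both branches of attest_receipt and inspect the counters.
from coordinator_app import attest_receipt  # hypothetical module name
from aitbc_chain.metrics import metrics_registry

attest_receipt({"job_id": "job-1"})        # assumed key in MOCK_JOBS -> miner_receipts_attested_total
attest_receipt({"job_id": "no-such-job"})  # unknown job -> miner_receipts_unknown_total
print(metrics_registry.render_prometheus())
```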
apps/blockchain-node/scripts/ws_load_test.py (new file, 224 lines)
#!/usr/bin/env python3
"""Asynchronous load harness for blockchain-node WebSocket + gossip pipeline."""

from __future__ import annotations

import argparse
import asyncio
import json
import random
import time
from dataclasses import dataclass, field
from typing import Dict, List, Optional

import httpx
import websockets


@dataclass
class PublishStats:
    sent: int = 0
    failed: int = 0
    latencies: List[float] = field(default_factory=list)

    @property
    def average_latency_ms(self) -> Optional[float]:
        if not self.latencies:
            return None
        return (sum(self.latencies) / len(self.latencies)) * 1000.0

    @property
    def p95_latency_ms(self) -> Optional[float]:
        if not self.latencies:
            return None
        sorted_latencies = sorted(self.latencies)
        index = int(len(sorted_latencies) * 0.95)
        index = min(index, len(sorted_latencies) - 1)
        return sorted_latencies[index] * 1000.0


@dataclass
class SubscriptionStats:
    messages: int = 0
    disconnects: int = 0


async def _publish_transactions(
    base_url: str,
    stats: PublishStats,
    stop_event: asyncio.Event,
    rate_hz: float,
    job_id: str,
    client_id: str,
    timeout: float,
) -> None:
    interval = 1 / rate_hz if rate_hz > 0 else 0
    async with httpx.AsyncClient(base_url=base_url, timeout=timeout) as client:
        while not stop_event.is_set():
            payload = {
                "type": "TRANSFER",
                "sender": f"miner-{client_id}",
                "nonce": stats.sent,
                "fee": 1,
                "payload": {
                    "job_id": job_id,
                    "amount": random.randint(1, 10),
                    "timestamp": time.time_ns(),
                },
            }
            started = time.perf_counter()
            try:
                response = await client.post("/rpc/sendTx", json=payload)
                response.raise_for_status()
            except httpx.HTTPError:
                stats.failed += 1
            else:
                stats.sent += 1
                stats.latencies.append(time.perf_counter() - started)

            if interval:
                try:
                    await asyncio.wait_for(stop_event.wait(), timeout=interval)
                except asyncio.TimeoutError:
                    continue
            else:
                await asyncio.sleep(0)


async def _subscription_worker(
    websocket_url: str,
    stats: SubscriptionStats,
    stop_event: asyncio.Event,
    client_name: str,
) -> None:
    while not stop_event.is_set():
        try:
            async with websockets.connect(websocket_url) as ws:
                while not stop_event.is_set():
                    try:
                        message = await asyncio.wait_for(ws.recv(), timeout=1.0)
                    except asyncio.TimeoutError:
                        continue
                    except websockets.ConnectionClosed:
                        stats.disconnects += 1
                        break
                    else:
                        _ = message  # lightweight backpressure test only
                        stats.messages += 1
        except OSError:
            stats.disconnects += 1
            await asyncio.sleep(0.5)


async def run_load(args: argparse.Namespace) -> None:
    stop_event = asyncio.Event()

    publish_stats: List[PublishStats] = [PublishStats() for _ in range(args.publishers)]
    subscription_stats: Dict[str, SubscriptionStats] = {
        "blocks": SubscriptionStats(),
        "transactions": SubscriptionStats(),
    }

    publisher_tasks = [
        asyncio.create_task(
            _publish_transactions(
                base_url=args.http_base,
                stats=publish_stats[i],
                stop_event=stop_event,
                rate_hz=args.publish_rate,
                job_id=f"load-test-job-{i}",
                client_id=f"{i}",
                timeout=args.http_timeout,
            ),
            name=f"publisher-{i}",
        )
        for i in range(args.publishers)
    ]

    subscriber_tasks = [
        asyncio.create_task(
            _subscription_worker(
                websocket_url=f"{args.ws_base}/blocks",
                stats=subscription_stats["blocks"],
                stop_event=stop_event,
                client_name="blocks",
            ),
            name="subscriber-blocks",
        ),
        asyncio.create_task(
            _subscription_worker(
                websocket_url=f"{args.ws_base}/transactions",
                stats=subscription_stats["transactions"],
                stop_event=stop_event,
                client_name="transactions",
            ),
            name="subscriber-transactions",
        ),
    ]

    all_tasks = publisher_tasks + subscriber_tasks

    try:
        await asyncio.wait_for(stop_event.wait(), timeout=args.duration)
    except asyncio.TimeoutError:
        pass
    finally:
        stop_event.set()
        await asyncio.gather(*all_tasks, return_exceptions=True)

    _print_summary(publish_stats, subscription_stats)


def _print_summary(publish_stats: List[PublishStats], subscription_stats: Dict[str, SubscriptionStats]) -> None:
    total_sent = sum(s.sent for s in publish_stats)
    total_failed = sum(s.failed for s in publish_stats)
    all_latencies = [lat for s in publish_stats for lat in s.latencies]

    summary = {
        "publish": {
            "total_sent": total_sent,
            "total_failed": total_failed,
            "average_latency_ms": (sum(all_latencies) / len(all_latencies) * 1000.0) if all_latencies else None,
            "p95_latency_ms": _p95(all_latencies),
        },
        "subscriptions": {
            name: {
                "messages": stats.messages,
                "disconnects": stats.disconnects,
            }
            for name, stats in subscription_stats.items()
        },
    }
    print(json.dumps(summary, indent=2))


def _p95(latencies: List[float]) -> Optional[float]:
    if not latencies:
        return None
    sorted_latencies = sorted(latencies)
    index = int(len(sorted_latencies) * 0.95)
    index = min(index, len(sorted_latencies) - 1)
    return sorted_latencies[index] * 1000.0


def _parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="AITBC blockchain-node WebSocket load harness")
    parser.add_argument("--http-base", default="http://127.0.0.1:8080", help="Base URL for REST API")
    parser.add_argument("--ws-base", default="ws://127.0.0.1:8080/rpc/ws", help="Base URL for WebSocket API")
    parser.add_argument("--duration", type=float, default=30.0, help="Duration in seconds")
    parser.add_argument("--publishers", type=int, default=4, help="Concurrent transaction publishers")
    parser.add_argument("--publish-rate", type=float, default=5.0, help="Transactions per second per publisher")
    parser.add_argument("--http-timeout", type=float, default=5.0, help="HTTP client timeout in seconds")
    return parser.parse_args()


def main() -> None:
    args = _parse_args()
    try:
        asyncio.run(run_load(args))
    except KeyboardInterrupt:
        pass


if __name__ == "__main__":
    main()
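Besides the CLI entry point, `run_load` can be driven programmatically; a minimal sketch, assuming the script's directory is on the import path (the Namespace fields mirror the argparse defaults above):

```python
# Sketch: invoke the load harness without the CLI.
import argparse
import asyncio

from ws_load_test import run_load  # assumes the script is importable

args = argparse.Namespace(
    http_base="http://127.0.0.1:8080",
    ws_base="ws://127.0.0.1:8080/rpc/ws",
    duration=10.0,      # shorter run than the 30 s default
    publishers=2,
    publish_rate=5.0,
    http_timeout=5.0,
)
asyncio.run(run_load(args))
```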
@@ -1,24 +1,35 @@
 from __future__ import annotations
 
 from contextlib import asynccontextmanager
 
 from fastapi import APIRouter, FastAPI
 from fastapi.responses import PlainTextResponse
 
 from .config import settings
 from .database import init_db
+from .gossip import create_backend, gossip_broker
 from .metrics import metrics_registry
 from .rpc.router import router as rpc_router
+from .rpc.websocket import router as websocket_router
 
 
 @asynccontextmanager
 async def lifespan(app: FastAPI):
     init_db()
-    yield
+    backend = create_backend(
+        settings.gossip_backend,
+        broadcast_url=settings.gossip_broadcast_url,
+    )
+    await gossip_broker.set_backend(backend)
+    try:
+        yield
+    finally:
+        await gossip_broker.shutdown()
 
 
 def create_app() -> FastAPI:
     app = FastAPI(title="AITBC Blockchain Node", version="0.1.0", lifespan=lifespan)
     app.include_router(rpc_router, prefix="/rpc", tags=["rpc"])
+    app.include_router(websocket_router, prefix="/rpc")
     metrics_router = APIRouter()
 
     @metrics_router.get("/metrics", response_class=PlainTextResponse, tags=["metrics"], summary="Prometheus metrics")
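The lifespan hook selects the backend from `ChainSettings`; assuming pydantic's default field-name-to-environment-variable mapping (no custom `env_prefix` is visible in the diff), the broadcast backend could be selected like this:

```python
# Sketch: select the broadcast backend via environment variables.
# GOSSIP_BACKEND / GOSSIP_BROADCAST_URL are assumed names derived from the
# ChainSettings fields; verify against the actual settings configuration.
import os

os.environ["GOSSIP_BACKEND"] = "broadcast"
os.environ["GOSSIP_BROADCAST_URL"] = "redis://127.0.0.1:6379"

from aitbc_chain.config import ChainSettings

settings = ChainSettings()
assert settings.gossip_backend == "broadcast"
```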
@@ -26,5 +26,8 @@ class ChainSettings(BaseSettings):
 
     block_time_seconds: int = 2
 
+    gossip_backend: str = "memory"
+    gossip_broadcast_url: Optional[str] = None
+
 
 settings = ChainSettings()
@@ -11,6 +11,7 @@ from sqlmodel import Session, select
 from ..logging import get_logger
 from ..metrics import metrics_registry
 from ..models import Block
+from ..gossip import gossip_broker
 
 
 @dataclass
@@ -78,9 +79,11 @@ class PoAProposer:
         head = session.exec(select(Block).order_by(Block.height.desc()).limit(1)).first()
         next_height = 0
         parent_hash = "0x00"
+        interval_seconds: Optional[float] = None
         if head is not None:
             next_height = head.height + 1
             parent_hash = head.hash
+            interval_seconds = (datetime.utcnow() - head.timestamp).total_seconds()
 
         timestamp = datetime.utcnow()
         block_hash = self._compute_block_hash(next_height, parent_hash, timestamp)
@@ -99,6 +102,21 @@ class PoAProposer:
 
         metrics_registry.increment("blocks_proposed_total")
         metrics_registry.set_gauge("chain_head_height", float(next_height))
+        if interval_seconds is not None and interval_seconds >= 0:
+            metrics_registry.observe("block_interval_seconds", interval_seconds)
+
+        asyncio.create_task(
+            gossip_broker.publish(
+                "blocks",
+                {
+                    "height": block.height,
+                    "hash": block.hash,
+                    "parent_hash": block.parent_hash,
+                    "timestamp": block.timestamp.isoformat(),
+                    "tx_count": block.tx_count,
+                },
+            )
+        )
 
         self._logger.info(
             "Proposed block",
@@ -129,6 +147,19 @@ class PoAProposer:
         )
         session.add(genesis)
         session.commit()
+        asyncio.create_task(
+            gossip_broker.publish(
+                "blocks",
+                {
+                    "height": genesis.height,
+                    "hash": genesis.hash,
+                    "parent_hash": genesis.parent_hash,
+                    "timestamp": genesis.timestamp.isoformat(),
+                    "tx_count": genesis.tx_count,
+                },
+            )
+        )
+
        self._logger.info("Created genesis block", extra={"hash": genesis_hash})
 
     def _fetch_chain_head(self) -> Optional[Block]:
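The proposer fires `gossip_broker.publish` via `asyncio.create_task`, so block production is not blocked on slow subscribers. A minimal sketch of a consumer for the "blocks" topic, meant to run inside the node's event loop (for example from a WebSocket handler):

```python
# Sketch: tail the "blocks" topic published by the proposer.
from aitbc_chain.gossip import gossip_broker


async def tail_blocks() -> None:
    subscription = await gossip_broker.subscribe("blocks", max_queue_size=10)
    try:
        while True:
            block = await subscription.get()
            print(block["height"], block["hash"])
    finally:
        subscription.close()
```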
apps/blockchain-node/src/aitbc_chain/gossip/__init__.py (new file, 17 lines)
from .broker import (
    BroadcastGossipBackend,
    GossipBroker,
    InMemoryGossipBackend,
    TopicSubscription,
    create_backend,
    gossip_broker,
)

__all__ = [
    "BroadcastGossipBackend",
    "GossipBroker",
    "InMemoryGossipBackend",
    "TopicSubscription",
    "create_backend",
    "gossip_broker",
]
apps/blockchain-node/src/aitbc_chain/gossip/broker.py (new file, 254 lines)
from __future__ import annotations

import asyncio
import json
from collections import defaultdict
from contextlib import suppress
from dataclasses import dataclass
from typing import Any, Callable, Dict, List, Optional, Set

try:
    from starlette.broadcast import Broadcast
except ImportError:  # pragma: no cover - Starlette is an indirect dependency of FastAPI
    Broadcast = None  # type: ignore[assignment]

from ..metrics import metrics_registry


def _increment_publication(metric_prefix: str, topic: str) -> None:
    metrics_registry.increment(f"{metric_prefix}_total")
    metrics_registry.increment(f"{metric_prefix}_topic_{topic}")


def _set_queue_gauge(topic: str, size: int) -> None:
    metrics_registry.set_gauge(f"gossip_queue_size_{topic}", float(size))


def _update_subscriber_metrics(topics: Dict[str, List["asyncio.Queue[Any]"]]) -> None:
    for topic, queues in topics.items():
        metrics_registry.set_gauge(f"gossip_subscribers_topic_{topic}", float(len(queues)))
    total = sum(len(queues) for queues in topics.values())
    metrics_registry.set_gauge("gossip_subscribers_total", float(total))


def _clear_topic_metrics(topic: str) -> None:
    metrics_registry.set_gauge(f"gossip_subscribers_topic_{topic}", 0.0)
    _set_queue_gauge(topic, 0)


@dataclass
class TopicSubscription:
    topic: str
    queue: "asyncio.Queue[Any]"
    _unsubscribe: Callable[[], None]

    def close(self) -> None:
        self._unsubscribe()

    async def get(self) -> Any:
        return await self.queue.get()

    async def __aiter__(self):  # type: ignore[override]
        try:
            while True:
                yield await self.queue.get()
        finally:
            self.close()


class GossipBackend:
    async def start(self) -> None:  # pragma: no cover - overridden as needed
        return None

    async def publish(self, topic: str, message: Any) -> None:
        raise NotImplementedError

    async def subscribe(self, topic: str, max_queue_size: int = 100) -> TopicSubscription:
        raise NotImplementedError

    async def shutdown(self) -> None:
        return None


class InMemoryGossipBackend(GossipBackend):
    def __init__(self) -> None:
        self._topics: Dict[str, List["asyncio.Queue[Any]"]] = defaultdict(list)
        self._lock = asyncio.Lock()

    async def publish(self, topic: str, message: Any) -> None:
        async with self._lock:
            queues = list(self._topics.get(topic, []))
        for queue in queues:
            await queue.put(message)
            _set_queue_gauge(topic, queue.qsize())
        _increment_publication("gossip_publications", topic)

    async def subscribe(self, topic: str, max_queue_size: int = 100) -> TopicSubscription:
        queue: "asyncio.Queue[Any]" = asyncio.Queue(maxsize=max_queue_size)

        async with self._lock:
            self._topics[topic].append(queue)
            _update_subscriber_metrics(self._topics)

        _set_queue_gauge(topic, queue.qsize())

        def _unsubscribe() -> None:
            async def _remove() -> None:
                async with self._lock:
                    queues = self._topics.get(topic)
                    if queues is None:
                        return
                    if queue in queues:
                        queues.remove(queue)
                    if not queues:
                        self._topics.pop(topic, None)
                        _clear_topic_metrics(topic)
                    _update_subscriber_metrics(self._topics)

            asyncio.create_task(_remove())

        return TopicSubscription(topic=topic, queue=queue, _unsubscribe=_unsubscribe)

    async def shutdown(self) -> None:
        async with self._lock:
            topics = list(self._topics.keys())
            self._topics.clear()
        for topic in topics:
            _clear_topic_metrics(topic)
        _update_subscriber_metrics(self._topics)


class BroadcastGossipBackend(GossipBackend):
    def __init__(self, url: str) -> None:
        if Broadcast is None:  # pragma: no cover - dependency is optional
            raise RuntimeError("Starlette Broadcast backend requested but starlette is not available")
        self._broadcast = Broadcast(url)  # type: ignore[arg-type]
        self._tasks: Set[asyncio.Task[None]] = set()
        self._lock = asyncio.Lock()
        self._running = False

    async def start(self) -> None:
        if not self._running:
            await self._broadcast.connect()  # type: ignore[union-attr]
            self._running = True

    async def publish(self, topic: str, message: Any) -> None:
        if not self._running:
            raise RuntimeError("Broadcast backend not started")
        payload = _encode_message(message)
        await self._broadcast.publish(topic, payload)  # type: ignore[union-attr]
        _increment_publication("gossip_broadcast_publications", topic)

    async def subscribe(self, topic: str, max_queue_size: int = 100) -> TopicSubscription:
        if not self._running:
            raise RuntimeError("Broadcast backend not started")

        queue: "asyncio.Queue[Any]" = asyncio.Queue(maxsize=max_queue_size)
        stop_event = asyncio.Event()

        async def _run_subscription() -> None:
            async with self._broadcast.subscribe(topic) as subscriber:  # type: ignore[attr-defined,union-attr]
                async for event in subscriber:  # type: ignore[union-attr]
                    if stop_event.is_set():
                        break
                    data = _decode_message(getattr(event, "message", event))
                    try:
                        await queue.put(data)
                        _set_queue_gauge(topic, queue.qsize())
                    except asyncio.CancelledError:
                        break

        task = asyncio.create_task(_run_subscription(), name=f"broadcast-sub:{topic}")
        async with self._lock:
            self._tasks.add(task)
            metrics_registry.set_gauge("gossip_broadcast_subscribers_total", float(len(self._tasks)))

        def _unsubscribe() -> None:
            async def _stop() -> None:
                stop_event.set()
                task.cancel()
                with suppress(asyncio.CancelledError):
                    await task
                async with self._lock:
                    self._tasks.discard(task)
                    metrics_registry.set_gauge("gossip_broadcast_subscribers_total", float(len(self._tasks)))

            asyncio.create_task(_stop())

        return TopicSubscription(topic=topic, queue=queue, _unsubscribe=_unsubscribe)

    async def shutdown(self) -> None:
        async with self._lock:
            tasks = list(self._tasks)
            self._tasks.clear()
            metrics_registry.set_gauge("gossip_broadcast_subscribers_total", 0.0)
        for task in tasks:
            task.cancel()
            with suppress(asyncio.CancelledError):
                await task
        if self._running:
            await self._broadcast.disconnect()  # type: ignore[union-attr]
            self._running = False


class GossipBroker:
    def __init__(self, backend: GossipBackend) -> None:
        self._backend = backend
        self._lock = asyncio.Lock()
        self._started = False

    async def publish(self, topic: str, message: Any) -> None:
        if not self._started:
            await self._backend.start()
            self._started = True
        await self._backend.publish(topic, message)

    async def subscribe(self, topic: str, max_queue_size: int = 100) -> TopicSubscription:
        if not self._started:
            await self._backend.start()
            self._started = True
        return await self._backend.subscribe(topic, max_queue_size=max_queue_size)

    async def set_backend(self, backend: GossipBackend) -> None:
        await backend.start()
        async with self._lock:
            previous = self._backend
            self._backend = backend
            self._started = True
        await previous.shutdown()

    async def shutdown(self) -> None:
        await self._backend.shutdown()
        self._started = False
        metrics_registry.set_gauge("gossip_subscribers_total", 0.0)


def create_backend(backend_type: str, *, broadcast_url: Optional[str] = None) -> GossipBackend:
    backend = backend_type.lower()
    if backend in {"memory", "inmemory", "local"}:
        return InMemoryGossipBackend()
    if backend in {"broadcast", "starlette", "redis"}:
        if not broadcast_url:
            raise ValueError("Broadcast backend requires a gossip_broadcast_url setting")
        return BroadcastGossipBackend(broadcast_url)
    raise ValueError(f"Unsupported gossip backend '{backend_type}'")


def _encode_message(message: Any) -> Any:
    if isinstance(message, (str, bytes, bytearray)):
        return message
    return json.dumps(message, separators=(",", ":"))


def _decode_message(message: Any) -> Any:
    if isinstance(message, (bytes, bytearray)):
        message = message.decode("utf-8")
    if isinstance(message, str):
        try:
            return json.loads(message)
        except json.JSONDecodeError:
            return message
    return message


gossip_broker = GossipBroker(InMemoryGossipBackend())
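A minimal sketch of the in-memory publish/subscribe round trip, using only the API defined above:

```python
# Sketch: round trip through the in-memory backend.
import asyncio

from aitbc_chain.gossip import GossipBroker, InMemoryGossipBackend


async def demo() -> None:
    broker = GossipBroker(InMemoryGossipBackend())
    sub = await broker.subscribe("transactions", max_queue_size=10)
    await broker.publish("transactions", {"tx_hash": "0xabc"})
    print(await sub.get())  # {'tx_hash': '0xabc'}
    sub.close()
    await broker.shutdown()


asyncio.run(demo())
```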
@@ -15,6 +15,7 @@ class MetricsRegistry:
     def __init__(self) -> None:
         self._counters: Dict[str, float] = {}
         self._gauges: Dict[str, float] = {}
+        self._summaries: Dict[str, tuple[float, float]] = {}
         self._lock = Lock()
 
     def increment(self, name: str, amount: float = 1.0) -> None:
@@ -25,6 +26,17 @@ class MetricsRegistry:
         with self._lock:
             self._gauges[name] = value
 
+    def observe(self, name: str, value: float) -> None:
+        with self._lock:
+            count, total = self._summaries.get(name, (0.0, 0.0))
+            self._summaries[name] = (count + 1.0, total + value)
+
+    def reset(self) -> None:
+        with self._lock:
+            self._counters.clear()
+            self._gauges.clear()
+            self._summaries.clear()
+
     def render_prometheus(self) -> str:
         with self._lock:
             lines: list[str] = []
@@ -34,6 +46,10 @@ class MetricsRegistry:
             for name, value in sorted(self._gauges.items()):
                 lines.append(f"# TYPE {name} gauge")
                 lines.append(f"{name} {value}")
+            for name, (count, total) in sorted(self._summaries.items()):
+                lines.append(f"# TYPE {name} summary")
+                lines.append(f"{name}_count {count}")
+                lines.append(f"{name}_sum {total}")
         return "\n".join(lines) + "\n"
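`observe()` keeps only a `(count, sum)` pair per summary, which is exactly the shape the alert and dashboard expressions consume (`block_interval_seconds_sum / block_interval_seconds_count`). A minimal sketch of how a summary renders:

```python
# Sketch: a summary renders as the _count/_sum pair the PromQL above expects.
from aitbc_chain.metrics import metrics_registry

metrics_registry.observe("block_interval_seconds", 2.0)
metrics_registry.observe("block_interval_seconds", 4.0)
print(metrics_registry.render_prometheus())
# ...
# # TYPE block_interval_seconds summary
# block_interval_seconds_count 2.0
# block_interval_seconds_sum 6.0
```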
@@ -1,6 +1,8 @@
 from __future__ import annotations
 
+import asyncio
 import json
+import time
 from typing import Any, Dict, Optional
 
 from fastapi import APIRouter, HTTPException, status
@@ -8,6 +10,7 @@ from pydantic import BaseModel, Field, model_validator
 from sqlmodel import select
 
 from ..database import session_scope
+from ..gossip import gossip_broker
 from ..mempool import get_mempool
 from ..metrics import metrics_registry
 from ..models import Account, Block, Receipt, Transaction
@@ -64,10 +67,15 @@ class MintFaucetRequest(BaseModel):
 
 @router.get("/head", summary="Get current chain head")
 async def get_head() -> Dict[str, Any]:
+    metrics_registry.increment("rpc_get_head_total")
+    start = time.perf_counter()
     with session_scope() as session:
         result = session.exec(select(Block).order_by(Block.height.desc()).limit(1)).first()
         if result is None:
+            metrics_registry.increment("rpc_get_head_not_found_total")
             raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="no blocks yet")
+        metrics_registry.increment("rpc_get_head_success_total")
+        metrics_registry.observe("rpc_get_head_duration_seconds", time.perf_counter() - start)
         return {
             "height": result.height,
             "hash": result.hash,
@@ -78,10 +86,15 @@ async def get_head() -> Dict[str, Any]:
 
 @router.get("/blocks/{height}", summary="Get block by height")
 async def get_block(height: int) -> Dict[str, Any]:
+    metrics_registry.increment("rpc_get_block_total")
+    start = time.perf_counter()
     with session_scope() as session:
         block = session.exec(select(Block).where(Block.height == height)).first()
         if block is None:
+            metrics_registry.increment("rpc_get_block_not_found_total")
             raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="block not found")
+        metrics_registry.increment("rpc_get_block_success_total")
+        metrics_registry.observe("rpc_get_block_duration_seconds", time.perf_counter() - start)
         return {
             "height": block.height,
             "hash": block.hash,
@@ -94,10 +107,15 @@ async def get_block(height: int) -> Dict[str, Any]:
 
 @router.get("/tx/{tx_hash}", summary="Get transaction by hash")
 async def get_transaction(tx_hash: str) -> Dict[str, Any]:
+    metrics_registry.increment("rpc_get_transaction_total")
+    start = time.perf_counter()
     with session_scope() as session:
         tx = session.exec(select(Transaction).where(Transaction.tx_hash == tx_hash)).first()
         if tx is None:
+            metrics_registry.increment("rpc_get_transaction_not_found_total")
             raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="transaction not found")
+        metrics_registry.increment("rpc_get_transaction_success_total")
+        metrics_registry.observe("rpc_get_transaction_duration_seconds", time.perf_counter() - start)
         return {
             "tx_hash": tx.tx_hash,
             "block_height": tx.block_height,
@@ -110,19 +128,30 @@ async def get_transaction(tx_hash: str) -> Dict[str, Any]:
 
 @router.get("/receipts/{receipt_id}", summary="Get receipt by ID")
 async def get_receipt(receipt_id: str) -> Dict[str, Any]:
+    metrics_registry.increment("rpc_get_receipt_total")
+    start = time.perf_counter()
     with session_scope() as session:
         receipt = session.exec(select(Receipt).where(Receipt.receipt_id == receipt_id)).first()
         if receipt is None:
+            metrics_registry.increment("rpc_get_receipt_not_found_total")
             raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="receipt not found")
+        metrics_registry.increment("rpc_get_receipt_success_total")
+        metrics_registry.observe("rpc_get_receipt_duration_seconds", time.perf_counter() - start)
         return _serialize_receipt(receipt)
 
 
 @router.get("/getBalance/{address}", summary="Get account balance")
 async def get_balance(address: str) -> Dict[str, Any]:
+    metrics_registry.increment("rpc_get_balance_total")
+    start = time.perf_counter()
     with session_scope() as session:
         account = session.get(Account, address)
         if account is None:
+            metrics_registry.increment("rpc_get_balance_empty_total")
+            metrics_registry.observe("rpc_get_balance_duration_seconds", time.perf_counter() - start)
             return {"address": address, "balance": 0, "nonce": 0}
+        metrics_registry.increment("rpc_get_balance_success_total")
+        metrics_registry.observe("rpc_get_balance_duration_seconds", time.perf_counter() - start)
         return {
             "address": account.address,
            "balance": account.balance,
@@ -133,15 +162,39 @@ async def get_balance(address: str) -> Dict[str, Any]:
 
 @router.post("/sendTx", summary="Submit a new transaction")
 async def send_transaction(request: TransactionRequest) -> Dict[str, Any]:
+    metrics_registry.increment("rpc_send_tx_total")
+    start = time.perf_counter()
     mempool = get_mempool()
     tx_dict = request.model_dump()
     tx_hash = mempool.add(tx_dict)
|
||||||
metrics_registry.increment("rpc_send_tx_total")
|
try:
|
||||||
|
asyncio.create_task(
|
||||||
|
gossip_broker.publish(
|
||||||
|
"transactions",
|
||||||
|
{
|
||||||
|
"tx_hash": tx_hash,
|
||||||
|
"sender": request.sender,
|
||||||
|
"recipient": request.recipient,
|
||||||
|
"payload": request.payload,
|
||||||
|
"nonce": request.nonce,
|
||||||
|
"fee": request.fee,
|
||||||
|
"type": request.type,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
)
|
||||||
|
metrics_registry.increment("rpc_send_tx_success_total")
|
||||||
return {"tx_hash": tx_hash}
|
return {"tx_hash": tx_hash}
|
||||||
|
except Exception:
|
||||||
|
metrics_registry.increment("rpc_send_tx_failed_total")
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
metrics_registry.observe("rpc_send_tx_duration_seconds", time.perf_counter() - start)
|
||||||
|
|
||||||
|
|
||||||
@router.post("/submitReceipt", summary="Submit receipt claim transaction")
|
@router.post("/submitReceipt", summary="Submit receipt claim transaction")
|
||||||
async def submit_receipt(request: ReceiptSubmissionRequest) -> Dict[str, Any]:
|
async def submit_receipt(request: ReceiptSubmissionRequest) -> Dict[str, Any]:
|
||||||
|
metrics_registry.increment("rpc_submit_receipt_total")
|
||||||
|
start = time.perf_counter()
|
||||||
tx_payload = {
|
tx_payload = {
|
||||||
"type": "RECEIPT_CLAIM",
|
"type": "RECEIPT_CLAIM",
|
||||||
"sender": request.sender,
|
"sender": request.sender,
|
||||||
@ -151,17 +204,31 @@ async def submit_receipt(request: ReceiptSubmissionRequest) -> Dict[str, Any]:
|
|||||||
"sig": request.sig,
|
"sig": request.sig,
|
||||||
}
|
}
|
||||||
tx_request = TransactionRequest.model_validate(tx_payload)
|
tx_request = TransactionRequest.model_validate(tx_payload)
|
||||||
metrics_registry.increment("rpc_submit_receipt_total")
|
try:
|
||||||
return await send_transaction(tx_request)
|
response = await send_transaction(tx_request)
|
||||||
|
metrics_registry.increment("rpc_submit_receipt_success_total")
|
||||||
|
return response
|
||||||
|
except HTTPException:
|
||||||
|
metrics_registry.increment("rpc_submit_receipt_failed_total")
|
||||||
|
raise
|
||||||
|
except Exception:
|
||||||
|
metrics_registry.increment("rpc_submit_receipt_failed_total")
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
metrics_registry.observe("rpc_submit_receipt_duration_seconds", time.perf_counter() - start)
|
||||||
|
|
||||||
|
|
||||||
@router.post("/estimateFee", summary="Estimate transaction fee")
|
@router.post("/estimateFee", summary="Estimate transaction fee")
|
||||||
async def estimate_fee(request: EstimateFeeRequest) -> Dict[str, Any]:
|
async def estimate_fee(request: EstimateFeeRequest) -> Dict[str, Any]:
|
||||||
|
metrics_registry.increment("rpc_estimate_fee_total")
|
||||||
|
start = time.perf_counter()
|
||||||
base_fee = 10
|
base_fee = 10
|
||||||
per_byte = 1
|
per_byte = 1
|
||||||
payload_bytes = len(json.dumps(request.payload, sort_keys=True, separators=(",", ":")).encode())
|
payload_bytes = len(json.dumps(request.payload, sort_keys=True, separators=(",", ":")).encode())
|
||||||
estimated_fee = base_fee + per_byte * payload_bytes
|
estimated_fee = base_fee + per_byte * payload_bytes
|
||||||
tx_type = (request.type or "TRANSFER").upper()
|
tx_type = (request.type or "TRANSFER").upper()
|
||||||
|
metrics_registry.increment("rpc_estimate_fee_success_total")
|
||||||
|
metrics_registry.observe("rpc_estimate_fee_duration_seconds", time.perf_counter() - start)
|
||||||
return {
|
return {
|
||||||
"type": tx_type,
|
"type": tx_type,
|
||||||
"base_fee": base_fee,
|
"base_fee": base_fee,
|
||||||
@ -172,6 +239,8 @@ async def estimate_fee(request: EstimateFeeRequest) -> Dict[str, Any]:
|
|||||||
|
|
||||||
@router.post("/admin/mintFaucet", summary="Mint devnet funds to an address")
|
@router.post("/admin/mintFaucet", summary="Mint devnet funds to an address")
|
||||||
async def mint_faucet(request: MintFaucetRequest) -> Dict[str, Any]:
|
async def mint_faucet(request: MintFaucetRequest) -> Dict[str, Any]:
|
||||||
|
metrics_registry.increment("rpc_mint_faucet_total")
|
||||||
|
start = time.perf_counter()
|
||||||
with session_scope() as session:
|
with session_scope() as session:
|
||||||
account = session.get(Account, request.address)
|
account = session.get(Account, request.address)
|
||||||
if account is None:
|
if account is None:
|
||||||
@ -181,4 +250,6 @@ async def mint_faucet(request: MintFaucetRequest) -> Dict[str, Any]:
|
|||||||
account.balance += request.amount
|
account.balance += request.amount
|
||||||
session.commit()
|
session.commit()
|
||||||
updated_balance = account.balance
|
updated_balance = account.balance
|
||||||
|
metrics_registry.increment("rpc_mint_faucet_success_total")
|
||||||
|
metrics_registry.observe("rpc_mint_faucet_duration_seconds", time.perf_counter() - start)
|
||||||
return {"address": request.address, "balance": updated_balance}
|
return {"address": request.address, "balance": updated_balance}
|
||||||
|
|||||||
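Note: the routes above assume a process-local metrics_registry exposing increment(name) and observe(name, value). Its concrete implementation is not part of this diff; a minimal sketch consistent with those calls (names and internals here are assumptions, not the aitbc_chain implementation) might look like:

# Minimal sketch of the metrics_registry interface assumed by the RPC routes.
from collections import defaultdict
from typing import DefaultDict, List


class MetricsRegistry:
    def __init__(self) -> None:
        self._counters: DefaultDict[str, int] = defaultdict(int)
        self._observations: DefaultDict[str, List[float]] = defaultdict(list)

    def increment(self, name: str, amount: int = 1) -> None:
        self._counters[name] += amount

    def observe(self, name: str, value: float) -> None:
        self._observations[name].append(value)


metrics_registry = MetricsRegistry()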
34 apps/blockchain-node/src/aitbc_chain/rpc/websocket.py Normal file
@@ -0,0 +1,34 @@
+from __future__ import annotations
+
+import asyncio
+from typing import AsyncIterator, Dict
+
+from fastapi import APIRouter, WebSocket, WebSocketDisconnect
+
+from ..gossip import gossip_broker
+
+router = APIRouter(prefix="/ws", tags=["ws"])
+
+
+async def _stream_topic(topic: str, websocket: WebSocket) -> None:
+    subscription = await gossip_broker.subscribe(topic)
+    try:
+        while True:
+            message = await subscription.get()
+            await websocket.send_json(message)
+    except WebSocketDisconnect:
+        pass
+    finally:
+        subscription.close()
+
+
+@router.websocket("/blocks")
+async def blocks_stream(websocket: WebSocket) -> None:
+    await websocket.accept()
+    await _stream_topic("blocks", websocket)
+
+
+@router.websocket("/transactions")
+async def transactions_stream(websocket: WebSocket) -> None:
+    await websocket.accept()
+    await _stream_topic("transactions", websocket)
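Note: _stream_topic assumes gossip_broker.subscribe(topic) returns a subscription object with an awaitable get() and a synchronous close(). The broker itself is defined elsewhere in aitbc_chain.gossip; a hypothetical in-memory sketch of that contract, useful for reading the router, could be:

# Hypothetical in-memory sketch of the gossip broker contract used above;
# the actual aitbc_chain.gossip implementation is not shown in this commit.
import asyncio
from collections import defaultdict
from typing import Any, DefaultDict, List


class Subscription:
    def __init__(self, broker: "GossipBroker", topic: str) -> None:
        self._broker = broker
        self._topic = topic
        self.queue: "asyncio.Queue[Any]" = asyncio.Queue()

    async def get(self) -> Any:
        return await self.queue.get()

    def close(self) -> None:
        self._broker._subs[self._topic].remove(self)


class GossipBroker:
    def __init__(self) -> None:
        self._subs: DefaultDict[str, List[Subscription]] = defaultdict(list)

    async def subscribe(self, topic: str) -> Subscription:
        sub = Subscription(self, topic)
        self._subs[topic].append(sub)
        return sub

    async def publish(self, topic: str, message: Any) -> None:
        # Fan the message out to every live subscriber of the topic.
        for sub in list(self._subs[topic]):
            await sub.queue.put(message)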
23 apps/blockchain-node/tests/conftest.py Normal file
@@ -0,0 +1,23 @@
+from __future__ import annotations
+
+import pytest
+from sqlmodel import SQLModel, Session, create_engine
+
+from aitbc_chain.models import Block, Transaction, Receipt  # noqa: F401 - ensure models imported for metadata
+
+
+@pytest.fixture(name="engine")
+def engine_fixture():
+    engine = create_engine("sqlite:///:memory:", connect_args={"check_same_thread": False})
+    SQLModel.metadata.create_all(engine)
+    try:
+        yield engine
+    finally:
+        SQLModel.metadata.drop_all(engine)
+
+
+@pytest.fixture(name="session")
+def session_fixture(engine):
+    with Session(engine) as session:
+        yield session
+        session.rollback()
76 apps/blockchain-node/tests/test_gossip_broadcast.py Normal file
@@ -0,0 +1,76 @@
+from __future__ import annotations
+
+import asyncio
+
+import pytest
+from fastapi.testclient import TestClient
+
+from aitbc_chain.app import create_app
+from aitbc_chain.gossip import BroadcastGossipBackend, InMemoryGossipBackend, gossip_broker
+
+
+@pytest.fixture(autouse=True)
+async def reset_broker_backend():
+    previous_backend = InMemoryGossipBackend()
+    await gossip_broker.set_backend(previous_backend)
+    yield
+    await gossip_broker.set_backend(InMemoryGossipBackend())
+
+
+def _run_in_thread(fn):
+    loop = asyncio.get_event_loop()
+    return loop.run_in_executor(None, fn)
+
+
+@pytest.mark.asyncio
+async def test_websocket_fanout_with_broadcast_backend():
+    backend = BroadcastGossipBackend("memory://")
+    await gossip_broker.set_backend(backend)
+
+    app = create_app()
+
+    loop = asyncio.get_running_loop()
+
+    def _sync_test() -> None:
+        with TestClient(app) as client:
+            with client.websocket_connect("/rpc/ws/transactions") as ws_a, client.websocket_connect(
+                "/rpc/ws/transactions"
+            ) as ws_b:
+                payload = {
+                    "tx_hash": "0x01",
+                    "sender": "alice",
+                    "recipient": "bob",
+                    "payload": {"amount": 1},
+                    "nonce": 0,
+                    "fee": 0,
+                    "type": "TRANSFER",
+                }
+                fut = asyncio.run_coroutine_threadsafe(gossip_broker.publish("transactions", payload), loop)
+                fut.result(timeout=5.0)
+                assert ws_a.receive_json() == payload
+                assert ws_b.receive_json() == payload
+
+    await _run_in_thread(_sync_test)
+
+
+@pytest.mark.asyncio
+async def test_broadcast_backend_decodes_cursorless_payload():
+    backend = BroadcastGossipBackend("memory://")
+    await gossip_broker.set_backend(backend)
+
+    app = create_app()
+
+    loop = asyncio.get_running_loop()
+
+    def _sync_test() -> None:
+        with TestClient(app) as client:
+            with client.websocket_connect("/rpc/ws/blocks") as ws:
+                payload = [
+                    {"height": 1, "hash": "0xabc"},
+                    {"height": 2, "hash": "0xdef"},
+                ]
+                fut = asyncio.run_coroutine_threadsafe(gossip_broker.publish("blocks", payload), loop)
+                fut.result(timeout=5.0)
+                assert ws.receive_json() == payload
+
+    await _run_in_thread(_sync_test)
92 apps/blockchain-node/tests/test_models.py Normal file
@@ -0,0 +1,92 @@
+from __future__ import annotations
+
+import pytest
+from sqlmodel import Session
+
+from aitbc_chain.models import Block, Transaction, Receipt
+
+
+def _insert_block(session: Session, height: int = 0) -> Block:
+    block = Block(
+        height=height,
+        hash=f"0x{'0'*63}{height}",
+        parent_hash="0x" + "0" * 64,
+        proposer="validator",
+        tx_count=0,
+    )
+    session.add(block)
+    session.commit()
+    session.refresh(block)
+    return block
+
+
+def test_relationships(session: Session) -> None:
+    block = _insert_block(session, height=1)
+
+    tx = Transaction(
+        tx_hash="0x" + "1" * 64,
+        block_height=block.height,
+        sender="alice",
+        recipient="bob",
+        payload={"foo": "bar"},
+    )
+    receipt = Receipt(
+        job_id="job-1",
+        receipt_id="0x" + "2" * 64,
+        block_height=block.height,
+        payload={},
+        miner_signature={},
+        coordinator_attestations=[],
+    )
+    session.add(tx)
+    session.add(receipt)
+    session.commit()
+    session.refresh(tx)
+    session.refresh(receipt)
+
+    assert tx.block is not None
+    assert tx.block.hash == block.hash
+    assert receipt.block is not None
+    assert receipt.block.hash == block.hash
+
+
+def test_hash_validation_accepts_hex(session: Session) -> None:
+    block = Block(
+        height=10,
+        hash="0x" + "a" * 64,
+        parent_hash="0x" + "b" * 64,
+        proposer="validator",
+    )
+    session.add(block)
+    session.commit()
+    session.refresh(block)
+
+    assert block.hash.startswith("0x")
+    assert block.parent_hash.startswith("0x")
+
+
+def test_hash_validation_rejects_non_hex(session: Session) -> None:
+    with pytest.raises(ValueError):
+        Block(
+            height=20,
+            hash="not-hex",
+            parent_hash="0x" + "c" * 64,
+            proposer="validator",
+        )
+
+    with pytest.raises(ValueError):
+        Transaction(
+            tx_hash="bad",
+            sender="alice",
+            recipient="bob",
+            payload={},
+        )
+
+    with pytest.raises(ValueError):
+        Receipt(
+            job_id="job",
+            receipt_id="oops",
+            payload={},
+            miner_signature={},
+            coordinator_attestations=[],
+        )
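Note: these tests imply the models validate hash, tx_hash, and receipt_id as 0x-prefixed hex strings and raise ValueError otherwise. The validator itself is not in this commit; a minimal sketch of the assumed check (the name validate_hex_hash is hypothetical):

# Sketch of the hex validation the tests above rely on; the real validator
# lives in aitbc_chain.models and may differ in detail.
import re

_HEX_RE = re.compile(r"^0x[0-9a-fA-F]+$")


def validate_hex_hash(value: str) -> str:
    # Reject anything that is not a 0x-prefixed hexadecimal string.
    if not _HEX_RE.fullmatch(value):
        raise ValueError(f"expected 0x-prefixed hex string, got {value!r}")
    return value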
46 apps/blockchain-node/tests/test_websocket.py Normal file
@@ -0,0 +1,46 @@
+from __future__ import annotations
+
+import asyncio
+
+from fastapi.testclient import TestClient
+
+from aitbc_chain.app import create_app
+from aitbc_chain.gossip import gossip_broker
+
+
+def _publish(topic: str, message: dict) -> None:
+    asyncio.run(gossip_broker.publish(topic, message))
+
+
+def test_blocks_websocket_stream() -> None:
+    client = TestClient(create_app())
+
+    with client.websocket_connect("/rpc/ws/blocks") as websocket:
+        payload = {
+            "height": 1,
+            "hash": "0x" + "1" * 64,
+            "parent_hash": "0x" + "0" * 64,
+            "timestamp": "2025-01-01T00:00:00Z",
+            "tx_count": 2,
+        }
+        _publish("blocks", payload)
+        message = websocket.receive_json()
+        assert message == payload
+
+
+def test_transactions_websocket_stream() -> None:
+    client = TestClient(create_app())
+
+    with client.websocket_connect("/rpc/ws/transactions") as websocket:
+        payload = {
+            "tx_hash": "0x" + "a" * 64,
+            "sender": "alice",
+            "recipient": "bob",
+            "payload": {"amount": 1},
+            "nonce": 1,
+            "fee": 0,
+            "type": "TRANSFER",
+        }
+        _publish("transactions", payload)
+        message = websocket.receive_json()
+        assert message == payload
@@ -6,6 +6,45 @@
   z-index: 1000;
 }
 
+@media (max-width: 600px) {
+  .page {
+    padding: 1.5rem 1rem 3rem;
+  }
+
+  .site-header__inner {
+    flex-direction: column;
+    align-items: flex-start;
+  }
+
+  .site-header__controls {
+    align-items: stretch;
+    gap: 0.5rem;
+  }
+
+  .site-header__nav {
+    gap: 0.5rem;
+  }
+
+  .site-header__nav a {
+    flex: 1 1 45%;
+    text-align: center;
+  }
+
+  .addresses__input-group,
+  .receipts__input-group {
+    flex-direction: column;
+  }
+
+  .toast-container {
+    left: 0;
+    right: 0;
+    top: auto;
+    bottom: 1rem;
+    width: min(90vw, 360px);
+    margin: 0 auto;
+  }
+}
+
 .site-header__inner {
   margin: 0 auto;
   max-width: 1200px;
@@ -80,6 +119,37 @@
   padding: 2rem 1.5rem 4rem;
 }
 
+.toast-container {
+  position: fixed;
+  top: 1.25rem;
+  right: 1.25rem;
+  display: grid;
+  gap: 0.75rem;
+  z-index: 1200;
+}
+
+.toast {
+  opacity: 0;
+  transform: translateY(-6px);
+  transition: opacity 150ms ease, transform 180ms ease;
+  border-radius: 0.75rem;
+  padding: 0.75rem 1rem;
+  font-size: 0.9rem;
+  min-width: 220px;
+}
+
+.toast--error {
+  background: rgba(255, 102, 102, 0.16);
+  border: 1px solid rgba(255, 102, 102, 0.35);
+  color: #ffd3d3;
+  box-shadow: 0 12px 30px rgba(0, 0, 0, 0.35);
+}
+
+.toast.is-visible {
+  opacity: 1;
+  transform: translateY(0px);
+}
+
 @media (max-width: 768px) {
   .site-header__inner {
     justify-content: space-between;
34 apps/explorer-web/src/components/notifications.ts Normal file
@@ -0,0 +1,34 @@
+const TOAST_DURATION_MS = 4000;
+
+let container: HTMLDivElement | null = null;
+
+export function initNotifications(): void {
+  if (!container) {
+    container = document.createElement("div");
+    container.className = "toast-container";
+    document.body.appendChild(container);
+  }
+}
+
+export function notifyError(message: string): void {
+  if (!container) {
+    initNotifications();
+  }
+  if (!container) {
+    return;
+  }
+
+  const toast = document.createElement("div");
+  toast.className = "toast toast--error";
+  toast.textContent = message;
+  container.appendChild(toast);
+
+  requestAnimationFrame(() => {
+    toast.classList.add("is-visible");
+  });
+
+  setTimeout(() => {
+    toast.classList.remove("is-visible");
+    setTimeout(() => toast.remove(), 250);
+  }, TOAST_DURATION_MS);
+}
@@ -1,4 +1,5 @@
 import { CONFIG, type DataMode } from "../config";
+import { notifyError } from "../components/notifications";
 import type {
   BlockListResponse,
   TransactionListResponse,
@@ -35,6 +36,7 @@ export async function fetchBlocks(): Promise<BlockSummary[]> {
     return data.items;
   } catch (error) {
     console.warn("[Explorer] Failed to fetch live block data", error);
+    notifyError("Unable to load live block data. Displaying placeholders.");
     return [];
   }
 }
@@ -54,6 +56,7 @@ export async function fetchTransactions(): Promise<TransactionSummary[]> {
     return data.items;
   } catch (error) {
     console.warn("[Explorer] Failed to fetch live transaction data", error);
+    notifyError("Unable to load live transaction data. Displaying placeholders.");
     return [];
   }
 }
@@ -73,6 +76,7 @@ export async function fetchAddresses(): Promise<AddressSummary[]> {
     return Array.isArray(data) ? data : data.items;
   } catch (error) {
     console.warn("[Explorer] Failed to fetch live address data", error);
+    notifyError("Unable to load live address data. Displaying placeholders.");
     return [];
   }
 }
@@ -92,6 +96,7 @@ export async function fetchReceipts(): Promise<ReceiptSummary[]> {
     return data.items;
   } catch (error) {
     console.warn("[Explorer] Failed to fetch live receipt data", error);
+    notifyError("Unable to load live receipt data. Displaying placeholders.");
     return [];
   }
 }
@@ -107,6 +112,7 @@ async function fetchMock<T>(resource: string): Promise<T> {
     return (await response.json()) as T;
   } catch (error) {
     console.warn(`[Explorer] Failed to fetch mock data from ${url}`, error);
+    notifyError("Mock data is unavailable. Please verify development assets.");
     return [] as unknown as T;
   }
 }
@@ -10,6 +10,7 @@ import { addressesTitle, renderAddressesPage, initAddressesPage } from "./pages/
 import { receiptsTitle, renderReceiptsPage, initReceiptsPage } from "./pages/receipts";
 import { initDataModeToggle } from "./components/dataModeToggle";
 import { getDataMode } from "./lib/mockData";
+import { initNotifications } from "./components/notifications";
 
 type PageConfig = {
   title: string;
@@ -49,14 +50,13 @@ const routes: Record<string, PageConfig> = {
 };
 
 function render(): void {
+  initNotifications();
   const root = document.querySelector<HTMLDivElement>("#app");
   if (!root) {
     console.warn("[Explorer] Missing #app root element");
     return;
   }
 
-  document.documentElement.dataset.mode = getDataMode();
-
   const currentPath = window.location.pathname.replace(/\/$/, "");
   const normalizedPath = currentPath === "" ? "/" : currentPath;
   const page = routes[normalizedPath] ?? null;
@@ -40,7 +40,6 @@ export async function initOverviewPage(): Promise<void> {
     fetchTransactions(),
     fetchReceipts(),
   ]);
-
   const blockStats = document.querySelector<HTMLUListElement>(
     "#overview-block-stats",
   );
@@ -54,13 +53,12 @@ export async function initOverviewPage(): Promise<void> {
         <li><strong>Time:</strong> ${new Date(latest.timestamp).toLocaleString()}</li>
       `;
     } else {
-      blockStats.innerHTML = `<li class="placeholder">No mock block data available.</li>`;
+      blockStats.innerHTML = `
+        <li class="placeholder">No blocks available. Try switching data mode.</li>
+      `;
     }
   }
-  const txStats = document.querySelector<HTMLUListElement>(
-    "#overview-transaction-stats",
-  );
+  const txStats = document.querySelector<HTMLUListElement>("#overview-transaction-stats");
   if (txStats) {
     if (transactions.length > 0) {
       const succeeded = transactions.filter((tx) => tx.status === "Succeeded");
@@ -70,7 +68,7 @@ export async function initOverviewPage(): Promise<void> {
         <li><strong>Pending:</strong> ${transactions.length - succeeded.length}</li>
       `;
     } else {
-      txStats.innerHTML = `<li class="placeholder">No mock transaction data available.</li>`;
+      txStats.innerHTML = `<li class="placeholder">No transactions available. Try switching data mode.</li>`;
     }
   }
 
@@ -86,7 +84,7 @@ export async function initOverviewPage(): Promise<void> {
         <li><strong>Pending:</strong> ${receipts.length - attested.length}</li>
       `;
     } else {
-      receiptStats.innerHTML = `<li class="placeholder">No mock receipt data available.</li>`;
+      receiptStats.innerHTML = `<li class="placeholder">No receipts available. Try switching data mode.</li>`;
     }
   }
 }
47 apps/pool-hub/migrations/env.py Normal file
@@ -0,0 +1,47 @@
+from __future__ import annotations
+
+import asyncio
+from logging.config import fileConfig
+
+from alembic import context
+from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
+
+from poolhub.models import Base
+from poolhub.settings import settings
+
+config = context.config
+
+if config.config_file_name is not None:
+    fileConfig(config.config_file_name)
+
+target_metadata = Base.metadata
+
+
+def _configure_context(connection=None, *, url: str | None = None) -> None:
+    context.configure(
+        connection=connection,
+        url=url,
+        target_metadata=target_metadata,
+        literal_binds=True,
+        dialect_opts={"paramstyle": "named"},
+    )
+
+
+def run_migrations_offline() -> None:
+    _configure_context(url=settings.postgres_dsn)
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+async def run_migrations_online() -> None:
+    connectable = create_async_engine(settings.postgres_dsn, pool_pre_ping=True)
+    async with connectable.connect() as connection:
+        await connection.run_sync(_configure_context)
+        await connection.run_sync(lambda conn: context.run_migrations())
+    await connectable.dispose()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    asyncio.run(run_migrations_online())
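Note: with this env.py in place, the pool-hub migrations would normally be applied with `alembic upgrade head` from the service directory, or programmatically as sketched below; the ini path is an assumption about the local layout, not something this commit pins down.

# Applying the pool-hub migrations programmatically; the ini path is hypothetical.
from alembic import command
from alembic.config import Config

cfg = Config("apps/pool-hub/alembic.ini")  # assumed location of the alembic config
command.upgrade(cfg, "head")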
104 apps/pool-hub/migrations/versions/a58c1f3b3e87_initial_schema.py Normal file
@@ -0,0 +1,104 @@
+"""initial schema
+
+Revision ID: a58c1f3b3e87
+Revises:
+Create Date: 2025-09-27 12:07:40.000000
+
+"""
+
+from __future__ import annotations
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "a58c1f3b3e87"
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    op.create_table(
+        "miners",
+        sa.Column("miner_id", sa.String(length=64), primary_key=True),
+        sa.Column("api_key_hash", sa.String(length=128), nullable=False),
+        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("NOW()")),
+        sa.Column("last_seen_at", sa.DateTime(timezone=True)),
+        sa.Column("addr", sa.String(length=256)),
+        sa.Column("proto", sa.String(length=32)),
+        sa.Column("gpu_vram_gb", sa.Float()),
+        sa.Column("gpu_name", sa.String(length=128)),
+        sa.Column("cpu_cores", sa.Integer()),
+        sa.Column("ram_gb", sa.Float()),
+        sa.Column("max_parallel", sa.Integer()),
+        sa.Column("base_price", sa.Float()),
+        sa.Column("tags", postgresql.JSONB(astext_type=sa.Text())),
+        sa.Column("capabilities", postgresql.JSONB(astext_type=sa.Text())),
+        sa.Column("trust_score", sa.Float(), server_default="0.5"),
+        sa.Column("region", sa.String(length=64)),
+    )
+
+    op.create_table(
+        "miner_status",
+        sa.Column("miner_id", sa.String(length=64), sa.ForeignKey("miners.miner_id", ondelete="CASCADE"), primary_key=True),
+        sa.Column("queue_len", sa.Integer(), server_default="0"),
+        sa.Column("busy", sa.Boolean(), server_default=sa.text("false")),
+        sa.Column("avg_latency_ms", sa.Integer()),
+        sa.Column("temp_c", sa.Integer()),
+        sa.Column("mem_free_gb", sa.Float()),
+        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("NOW()")),
+    )
+
+    op.create_table(
+        "match_requests",
+        sa.Column("id", postgresql.UUID(as_uuid=True), primary_key=True),
+        sa.Column("job_id", sa.String(length=64), nullable=False),
+        sa.Column("requirements", postgresql.JSONB(astext_type=sa.Text()), nullable=False),
+        sa.Column("hints", postgresql.JSONB(astext_type=sa.Text()), server_default=sa.text("'{}'::jsonb")),
+        sa.Column("top_k", sa.Integer(), server_default="1"),
+        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("NOW()")),
+    )
+
+    op.create_table(
+        "match_results",
+        sa.Column("id", postgresql.UUID(as_uuid=True), primary_key=True),
+        sa.Column("request_id", postgresql.UUID(as_uuid=True), sa.ForeignKey("match_requests.id", ondelete="CASCADE"), nullable=False),
+        sa.Column("miner_id", sa.String(length=64), nullable=False),
+        sa.Column("score", sa.Float(), nullable=False),
+        sa.Column("explain", sa.Text()),
+        sa.Column("eta_ms", sa.Integer()),
+        sa.Column("price", sa.Float()),
+        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("NOW()")),
+    )
+    op.create_index("ix_match_results_request_id", "match_results", ["request_id"])
+
+    op.create_table(
+        "feedback",
+        sa.Column("id", postgresql.UUID(as_uuid=True), primary_key=True),
+        sa.Column("job_id", sa.String(length=64), nullable=False),
+        sa.Column("miner_id", sa.String(length=64), sa.ForeignKey("miners.miner_id", ondelete="CASCADE"), nullable=False),
+        sa.Column("outcome", sa.String(length=32), nullable=False),
+        sa.Column("latency_ms", sa.Integer()),
+        sa.Column("fail_code", sa.String(length=64)),
+        sa.Column("tokens_spent", sa.Float()),
+        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("NOW()")),
+    )
+    op.create_index("ix_feedback_miner_id", "feedback", ["miner_id"])
+    op.create_index("ix_feedback_job_id", "feedback", ["job_id"])
+
+
+def downgrade() -> None:
+    op.drop_index("ix_feedback_job_id", table_name="feedback")
+    op.drop_index("ix_feedback_miner_id", table_name="feedback")
+    op.drop_table("feedback")
+
+    op.drop_index("ix_match_results_request_id", table_name="match_results")
+    op.drop_table("match_results")
+
+    op.drop_table("match_requests")
+
+    op.drop_table("miner_status")
+
+    op.drop_table("miners")
13 apps/pool-hub/src/poolhub/__init__.py Normal file
@@ -0,0 +1,13 @@
+"""AITBC Pool Hub service package."""
+
+from .settings import Settings, settings
+from .database import create_engine, get_session
+from .redis_cache import get_redis
+
+__all__ = [
+    "Settings",
+    "settings",
+    "create_engine",
+    "get_session",
+    "get_redis",
+]
5 apps/pool-hub/src/poolhub/app/__init__.py Normal file
@@ -0,0 +1,5 @@
+"""FastAPI application wiring for the AITBC Pool Hub."""
+
+from .main import create_app, app
+
+__all__ = ["create_app", "app"]
27 apps/pool-hub/src/poolhub/app/deps.py Normal file
@@ -0,0 +1,27 @@
+from __future__ import annotations
+
+from typing import AsyncGenerator
+
+from fastapi import Depends
+
+from ..database import get_session
+from ..redis_cache import get_redis
+
+
+def get_db_session() -> AsyncGenerator:
+    return get_session()
+
+
+def get_redis_client() -> AsyncGenerator:
+    return get_redis()
+
+
+# FastAPI dependency wrappers
+async def db_session_dep(session=Depends(get_session)):
+    async for s in session:
+        yield s
+
+
+async def redis_dep(client=Depends(get_redis)):
+    async for c in client:
+        yield c
31 apps/pool-hub/src/poolhub/app/main.py Normal file
@@ -0,0 +1,31 @@
+from __future__ import annotations
+
+from contextlib import asynccontextmanager
+
+from fastapi import FastAPI
+
+from ..database import close_engine, create_engine
+from ..redis_cache import close_redis, create_redis
+from ..settings import settings
+from .routers import health_router, match_router, metrics_router
+
+
+@asynccontextmanager
+async def lifespan(_: FastAPI):
+    create_engine()
+    create_redis()
+    try:
+        yield
+    finally:
+        await close_engine()
+        await close_redis()
+
+
+app = FastAPI(**settings.asgi_kwargs(), lifespan=lifespan)
+app.include_router(match_router, prefix="/v1")
+app.include_router(health_router)
+app.include_router(metrics_router)
+
+
+def create_app() -> FastAPI:
+    return app
39 apps/pool-hub/src/poolhub/app/prometheus.py Normal file
@@ -0,0 +1,39 @@
+from __future__ import annotations
+
+from prometheus_client import CONTENT_TYPE_LATEST, Counter, Gauge, Histogram, generate_latest
+
+match_requests_total = Counter(
+    "poolhub_match_requests_total",
+    "Total number of match requests received",
+)
+match_candidates_returned = Counter(
+    "poolhub_match_candidates_total",
+    "Total number of candidates returned",
+)
+match_failures_total = Counter(
+    "poolhub_match_failures_total",
+    "Total number of match request failures",
+)
+match_latency_seconds = Histogram(
+    "poolhub_match_latency_seconds",
+    "Latency of match processing",
+    buckets=(0.01, 0.05, 0.1, 0.25, 0.5, 1.0, 2.5, 5.0),
+)
+miners_online_gauge = Gauge(
+    "poolhub_miners_online",
+    "Number of miners considered online",
+)
+
+
+def render_metrics() -> tuple[str, str]:
+    return generate_latest(), CONTENT_TYPE_LATEST
+
+
+def reset_metrics() -> None:
+    match_requests_total._value.set(0)  # type: ignore[attr-defined]
+    match_candidates_returned._value.set(0)  # type: ignore[attr-defined]
+    match_failures_total._value.set(0)  # type: ignore[attr-defined]
+    match_latency_seconds._sum.set(0)  # type: ignore[attr-defined]
+    match_latency_seconds._count.set(0)  # type: ignore[attr-defined]
+    match_latency_seconds._samples = []  # type: ignore[attr-defined]
+    miners_online_gauge._value.set(0)  # type: ignore[attr-defined]
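Note: generate_latest() returns bytes, so the tuple[str, str] annotation on render_metrics is loose, and reset_metrics reaches into prometheus_client private attributes. A test-friendly alternative, sketched here as an assumption rather than what the service does, is to bind the collectors to a dedicated registry that tests can rebuild:

# Hedged alternative: isolate collectors in their own registry so tests can
# recreate it instead of mutating private attributes.
from prometheus_client import CollectorRegistry, Counter, generate_latest

registry = CollectorRegistry()
requests_total = Counter(
    "poolhub_match_requests_total", "Match requests", registry=registry
)


def render() -> bytes:
    # Scraping only this registry keeps global state out of the picture.
    return generate_latest(registry)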
7 apps/pool-hub/src/poolhub/app/routers/__init__.py Normal file
@@ -0,0 +1,7 @@
+"""FastAPI routers for Pool Hub."""
+
+from .match import router as match_router
+from .health import router as health_router
+from .metrics import router as metrics_router
+
+__all__ = ["match_router", "health_router", "metrics_router"]
50 apps/pool-hub/src/poolhub/app/routers/health.py Normal file
@@ -0,0 +1,50 @@
+from __future__ import annotations
+
+from fastapi import APIRouter, Depends
+from redis.asyncio import Redis
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from ..deps import db_session_dep, redis_dep
+from ..prometheus import miners_online_gauge
+from poolhub.repositories.miner_repository import MinerRepository
+from ..schemas import HealthResponse
+
+router = APIRouter(tags=["health"], prefix="/v1")
+
+
+@router.get("/health", response_model=HealthResponse, summary="Pool Hub health status")
+async def health_endpoint(
+    session: AsyncSession = Depends(db_session_dep),
+    redis: Redis = Depends(redis_dep),
+) -> HealthResponse:
+    db_ok = True
+    redis_ok = True
+    db_error: str | None = None
+    redis_error: str | None = None
+
+    try:
+        await session.execute("SELECT 1")
+    except Exception as exc:  # pragma: no cover
+        db_ok = False
+        db_error = str(exc)
+
+    try:
+        await redis.ping()
+    except Exception as exc:  # pragma: no cover
+        redis_ok = False
+        redis_error = str(exc)
+
+    miner_repo = MinerRepository(session, redis)
+    active_miners = await miner_repo.list_active_miners()
+    miners_online = len(active_miners)
+    miners_online_gauge.set(miners_online)
+
+    status = "ok" if db_ok and redis_ok else "degraded"
+    return HealthResponse(
+        status=status,
+        db=db_ok,
+        redis=redis_ok,
+        miners_online=miners_online,
+        db_error=db_error,
+        redis_error=redis_error,
+    )
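Note: on SQLAlchemy 2.x, session.execute no longer accepts a bare string, so the "SELECT 1" probe above would always land in the except branch and report the database as degraded. The usual fix is to wrap the statement in text(); a minimal sketch:

# Assumed fix for the DB probe under SQLAlchemy 2.x, where execute()
# rejects bare SQL strings.
from sqlalchemy import text


async def probe_db(session) -> bool:
    try:
        await session.execute(text("SELECT 1"))
        return True
    except Exception:
        return False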
116 apps/pool-hub/src/poolhub/app/routers/match.py Normal file
@@ -0,0 +1,116 @@
+from __future__ import annotations
+
+import time
+from typing import Any, Dict, List
+
+from fastapi import APIRouter, Depends, HTTPException, status
+from redis.asyncio import Redis
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from ..deps import db_session_dep, redis_dep
+from ..prometheus import (
+    match_candidates_returned,
+    match_failures_total,
+    match_latency_seconds,
+    match_requests_total,
+)
+from poolhub.repositories.match_repository import MatchRepository
+from poolhub.repositories.miner_repository import MinerRepository
+from ..schemas import MatchCandidate, MatchRequestPayload, MatchResponse
+
+router = APIRouter(tags=["match"])
+
+
+def _normalize_requirements(requirements: Dict[str, Any]) -> Dict[str, Any]:
+    return requirements or {}
+
+
+def _candidate_from_payload(payload: Dict[str, Any]) -> MatchCandidate:
+    return MatchCandidate(**payload)
+
+
+@router.post("/match", response_model=MatchResponse, summary="Find top miners for a job")
+async def match_endpoint(
+    payload: MatchRequestPayload,
+    session: AsyncSession = Depends(db_session_dep),
+    redis: Redis = Depends(redis_dep),
+) -> MatchResponse:
+    start = time.perf_counter()
+    match_requests_total.inc()
+
+    miner_repo = MinerRepository(session, redis)
+    match_repo = MatchRepository(session, redis)
+
+    requirements = _normalize_requirements(payload.requirements)
+    top_k = payload.top_k
+
+    try:
+        request = await match_repo.create_request(
+            job_id=payload.job_id,
+            requirements=requirements,
+            hints=payload.hints,
+            top_k=top_k,
+        )
+
+        active_miners = await miner_repo.list_active_miners()
+        candidates = _select_candidates(requirements, payload.hints, active_miners, top_k)
+
+        await match_repo.add_results(
+            request_id=request.id,
+            candidates=candidates,
+        )
+
+        match_candidates_returned.inc(len(candidates))
+        duration = time.perf_counter() - start
+        match_latency_seconds.observe(duration)
+
+        return MatchResponse(
+            job_id=payload.job_id,
+            candidates=[_candidate_from_payload(candidate) for candidate in candidates],
+        )
+    except Exception as exc:  # pragma: no cover - safeguards unexpected failures
+        match_failures_total.inc()
+        raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="match_failed") from exc
+
+
+def _select_candidates(
+    requirements: Dict[str, Any],
+    hints: Dict[str, Any],
+    active_miners: List[tuple],
+    top_k: int,
+) -> List[Dict[str, Any]]:
+    min_vram = float(requirements.get("min_vram_gb", 0))
+    min_ram = float(requirements.get("min_ram_gb", 0))
+    capabilities_required = set(requirements.get("capabilities_any", []))
+    region_hint = hints.get("region")
+
+    ranked: List[Dict[str, Any]] = []
+    for miner, status, score in active_miners:
+        if miner.gpu_vram_gb and miner.gpu_vram_gb < min_vram:
+            continue
+        if miner.ram_gb and miner.ram_gb < min_ram:
+            continue
+        if capabilities_required and not capabilities_required.issubset(set(miner.capabilities or [])):
+            continue
+        if region_hint and miner.region and miner.region != region_hint:
+            continue
+
+        candidate = {
+            "miner_id": miner.miner_id,
+            "addr": miner.addr,
+            "proto": miner.proto,
+            "score": float(score),
+            "explain": _compose_explain(score, miner, status),
+            "eta_ms": status.avg_latency_ms if status else None,
+            "price": miner.base_price,
+        }
+        ranked.append(candidate)
+
+    ranked.sort(key=lambda item: item["score"], reverse=True)
+    return ranked[:top_k]
+
+
+def _compose_explain(score: float, miner, status) -> str:
+    load = status.queue_len if status else 0
+    latency = status.avg_latency_ms if status else "n/a"
+    return f"score={score:.3f} load={load} latency={latency}"
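Note: since the router is mounted under /v1, the endpoint above can be exercised end to end once the service is running; the base URL, job id, and requirement values below are assumptions for a local run, not fixtures from this commit.

# Hypothetical local call against the match endpoint; base URL is assumed.
import httpx

payload = {
    "job_id": "job-123",
    "requirements": {"min_vram_gb": 24, "capabilities_any": ["cuda"]},
    "hints": {"region": "eu-west"},
    "top_k": 3,
}
response = httpx.post("http://localhost:8000/v1/match", json=payload, timeout=10.0)
response.raise_for_status()
for candidate in response.json()["candidates"]:
    # Candidates arrive ranked by score, highest first.
    print(candidate["miner_id"], candidate["score"])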
13 apps/pool-hub/src/poolhub/app/routers/metrics.py Normal file
@@ -0,0 +1,13 @@
+from __future__ import annotations
+
+from fastapi import APIRouter, Response
+
+from ..prometheus import render_metrics
+
+router = APIRouter(tags=["metrics"])
+
+
+@router.get("/metrics", summary="Prometheus metrics")
+async def metrics_endpoint() -> Response:
+    payload, content_type = render_metrics()
+    return Response(content=payload, media_type=content_type)
40 apps/pool-hub/src/poolhub/app/schemas.py Normal file
@@ -0,0 +1,40 @@
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class MatchRequestPayload(BaseModel):
+    job_id: str
+    requirements: Dict[str, Any] = Field(default_factory=dict)
+    hints: Dict[str, Any] = Field(default_factory=dict)
+    top_k: int = Field(default=1, ge=1, le=50)
+
+
+class MatchCandidate(BaseModel):
+    miner_id: str
+    addr: str
+    proto: str
+    score: float
+    explain: Optional[str] = None
+    eta_ms: Optional[int] = None
+    price: Optional[float] = None
+
+
+class MatchResponse(BaseModel):
+    job_id: str
+    candidates: List[MatchCandidate]
+
+
+class HealthResponse(BaseModel):
+    status: str
+    db: bool
+    redis: bool
+    miners_online: int
+    db_error: Optional[str] = None
+    redis_error: Optional[str] = None
+
+
+class MetricsResponse(BaseModel):
+    detail: str = "Prometheus metrics output"
54 apps/pool-hub/src/poolhub/database.py Normal file
@@ -0,0 +1,54 @@
+from __future__ import annotations
+
+from collections.abc import AsyncGenerator
+
+from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine
+
+from .settings import settings
+
+_engine: AsyncEngine | None = None
+_session_factory: async_sessionmaker[AsyncSession] | None = None
+
+
+def create_engine() -> AsyncEngine:
+    global _engine, _session_factory
+    if _engine is None:
+        _engine = create_async_engine(
+            settings.postgres_dsn,
+            pool_size=settings.postgres_pool_max,
+            max_overflow=0,
+            pool_pre_ping=True,
+        )
+        _session_factory = async_sessionmaker(
+            bind=_engine,
+            expire_on_commit=False,
+            autoflush=False,
+        )
+    return _engine
+
+
+def get_engine() -> AsyncEngine:
+    if _engine is None:
+        return create_engine()
+    return _engine
+
+
+def get_session_factory() -> async_sessionmaker[AsyncSession]:
+    if _session_factory is None:
+        create_engine()
+    assert _session_factory is not None
+    return _session_factory
+
+
+async def get_session() -> AsyncGenerator[AsyncSession, None]:
+    session_factory = get_session_factory()
+    async with session_factory() as session:
+        yield session
+
+
+async def close_engine() -> None:
+    global _engine
+    if _engine is not None:
+        await _engine.dispose()
+        _engine = None
95
apps/pool-hub/src/poolhub/models.py
Normal file
95
apps/pool-hub/src/poolhub/models.py
Normal file
@ -0,0 +1,95 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import datetime as dt
|
||||||
|
from typing import Dict, List, Optional
|
||||||
|
|
||||||
|
from sqlalchemy import Boolean, Column, DateTime, Float, ForeignKey, Integer, String, Text
|
||||||
|
from sqlalchemy.dialects.postgresql import JSONB, UUID as PGUUID
|
||||||
|
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship
|
||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
|
|
||||||
|
class Base(DeclarativeBase):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class Miner(Base):
|
||||||
|
__tablename__ = "miners"
|
||||||
|
|
||||||
|
miner_id: Mapped[str] = mapped_column(String(64), primary_key=True)
|
||||||
|
api_key_hash: Mapped[str] = mapped_column(String(128), nullable=False)
|
||||||
|
created_at: Mapped[dt.datetime] = mapped_column(DateTime(timezone=True), default=dt.datetime.utcnow)
|
||||||
|
last_seen_at: Mapped[Optional[dt.datetime]] = mapped_column(DateTime(timezone=True))
|
||||||
|
addr: Mapped[str] = mapped_column(String(256))
|
||||||
|
proto: Mapped[str] = mapped_column(String(32))
|
||||||
|
gpu_vram_gb: Mapped[float] = mapped_column(Float)
|
||||||
|
    gpu_name: Mapped[Optional[str]] = mapped_column(String(128))
    cpu_cores: Mapped[int] = mapped_column(Integer)
    ram_gb: Mapped[float] = mapped_column(Float)
    max_parallel: Mapped[int] = mapped_column(Integer)
    base_price: Mapped[float] = mapped_column(Float)
    tags: Mapped[Dict[str, str]] = mapped_column(JSONB, default=dict)
    capabilities: Mapped[List[str]] = mapped_column(JSONB, default=list)
    trust_score: Mapped[float] = mapped_column(Float, default=0.5)
    region: Mapped[Optional[str]] = mapped_column(String(64))

    status: Mapped["MinerStatus"] = relationship(back_populates="miner", cascade="all, delete-orphan", uselist=False)
    feedback: Mapped[List["Feedback"]] = relationship(back_populates="miner", cascade="all, delete-orphan")


class MinerStatus(Base):
    __tablename__ = "miner_status"

    miner_id: Mapped[str] = mapped_column(ForeignKey("miners.miner_id", ondelete="CASCADE"), primary_key=True)
    queue_len: Mapped[int] = mapped_column(Integer, default=0)
    busy: Mapped[bool] = mapped_column(Boolean, default=False)
    avg_latency_ms: Mapped[Optional[int]] = mapped_column(Integer)
    temp_c: Mapped[Optional[int]] = mapped_column(Integer)
    mem_free_gb: Mapped[Optional[float]] = mapped_column(Float)
    updated_at: Mapped[dt.datetime] = mapped_column(DateTime(timezone=True), default=dt.datetime.utcnow, onupdate=dt.datetime.utcnow)

    miner: Mapped[Miner] = relationship(back_populates="status")


class MatchRequest(Base):
    __tablename__ = "match_requests"

    id: Mapped[PGUUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4)
    job_id: Mapped[str] = mapped_column(String(64), nullable=False)
    requirements: Mapped[Dict[str, object]] = mapped_column(JSONB, nullable=False)
    hints: Mapped[Dict[str, object]] = mapped_column(JSONB, default=dict)
    top_k: Mapped[int] = mapped_column(Integer, default=1)
    created_at: Mapped[dt.datetime] = mapped_column(DateTime(timezone=True), default=dt.datetime.utcnow)

    results: Mapped[List["MatchResult"]] = relationship(back_populates="request", cascade="all, delete-orphan")


class MatchResult(Base):
    __tablename__ = "match_results"

    id: Mapped[PGUUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4)
    request_id: Mapped[PGUUID] = mapped_column(ForeignKey("match_requests.id", ondelete="CASCADE"), index=True)
    miner_id: Mapped[str] = mapped_column(String(64))
    score: Mapped[float] = mapped_column(Float)
    explain: Mapped[Optional[str]] = mapped_column(Text)
    eta_ms: Mapped[Optional[int]] = mapped_column(Integer)
    price: Mapped[Optional[float]] = mapped_column(Float)
    created_at: Mapped[dt.datetime] = mapped_column(DateTime(timezone=True), default=dt.datetime.utcnow)

    request: Mapped[MatchRequest] = relationship(back_populates="results")


class Feedback(Base):
    __tablename__ = "feedback"

    id: Mapped[PGUUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4)
    job_id: Mapped[str] = mapped_column(String(64), nullable=False)
    miner_id: Mapped[str] = mapped_column(ForeignKey("miners.miner_id", ondelete="CASCADE"), nullable=False)
    outcome: Mapped[str] = mapped_column(String(32), nullable=False)
    latency_ms: Mapped[Optional[int]] = mapped_column(Integer)
    fail_code: Mapped[Optional[str]] = mapped_column(String(64))
    tokens_spent: Mapped[Optional[float]] = mapped_column(Float)
    created_at: Mapped[dt.datetime] = mapped_column(DateTime(timezone=True), default=dt.datetime.utcnow)

    miner: Mapped[Miner] = relationship(back_populates="feedback")
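Note: a minimal bootstrap sketch, not part of this commit, showing how these declarative models can be materialized; the DSN is the poolhub settings default and the metadata call mirrors the test fixtures further down.

# Hypothetical bootstrap, assuming poolhub is importable:
import asyncio
from sqlalchemy.ext.asyncio import create_async_engine
from poolhub.models import Base

async def init_schema(dsn: str) -> None:
    engine = create_async_engine(dsn)
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)  # same call the test fixtures use
    await engine.dispose()

asyncio.run(init_schema("postgresql+asyncpg://poolhub:poolhub@127.0.0.1:5432/aitbc"))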
39
apps/pool-hub/src/poolhub/redis_cache.py
Normal file
@ -0,0 +1,39 @@
from __future__ import annotations

from collections.abc import AsyncGenerator

import redis.asyncio as redis

from .settings import settings

_redis_client: redis.Redis | None = None


def create_redis() -> redis.Redis:
    global _redis_client
    if _redis_client is None:
        _redis_client = redis.from_url(
            settings.redis_url,
            max_connections=settings.redis_max_connections,
            encoding="utf-8",
            decode_responses=True,
        )
    return _redis_client


def get_redis_client() -> redis.Redis:
    if _redis_client is None:
        return create_redis()
    return _redis_client


async def get_redis() -> AsyncGenerator[redis.Redis, None]:
    client = get_redis_client()
    yield client


async def close_redis() -> None:
    global _redis_client
    if _redis_client is not None:
        await _redis_client.close()
        _redis_client = None
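Note: a sketch of how this shared client is typically consumed; the route path and app wiring below are illustrative, not part of this commit.

# Illustrative FastAPI wiring for the get_redis dependency above.
from fastapi import Depends, FastAPI
from redis.asyncio import Redis
from poolhub.redis_cache import close_redis, get_redis

app = FastAPI()

@app.get("/ping-redis")  # hypothetical route for demonstration
async def ping_redis(client: Redis = Depends(get_redis)) -> dict:
    return {"pong": await client.ping()}

@app.on_event("shutdown")
async def shutdown() -> None:
    await close_redis()  # release the module-level connection pool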
11
apps/pool-hub/src/poolhub/repositories/__init__.py
Normal file
@ -0,0 +1,11 @@
"""Repository layer for Pool Hub."""

from .miner_repository import MinerRepository
from .match_repository import MatchRepository
from .feedback_repository import FeedbackRepository

__all__ = [
    "MinerRepository",
    "MatchRepository",
    "FeedbackRepository",
]
81
apps/pool-hub/src/poolhub/repositories/feedback_repository.py
Normal file
@ -0,0 +1,81 @@
from __future__ import annotations

import datetime as dt
import json
import logging
from typing import Iterable, List, Optional
from uuid import UUID

from redis.asyncio import Redis
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from ..models import Feedback
from ..storage.redis_keys import RedisKeys

logger = logging.getLogger(__name__)


class FeedbackRepository:
    """Persists coordinator feedback and emits Redis notifications."""

    def __init__(self, session: AsyncSession, redis: Redis) -> None:
        self._session = session
        self._redis = redis

    async def add_feedback(
        self,
        *,
        job_id: str,
        miner_id: str,
        outcome: str,
        latency_ms: Optional[int] = None,
        fail_code: Optional[str] = None,
        tokens_spent: Optional[float] = None,
    ) -> Feedback:
        feedback = Feedback(
            job_id=job_id,
            miner_id=miner_id,
            outcome=outcome,
            latency_ms=latency_ms,
            fail_code=fail_code,
            tokens_spent=tokens_spent,
            created_at=dt.datetime.utcnow(),
        )
        self._session.add(feedback)
        await self._session.flush()

        payload = {
            "job_id": job_id,
            "miner_id": miner_id,
            "outcome": outcome,
            "latency_ms": latency_ms,
            "fail_code": fail_code,
            "tokens_spent": tokens_spent,
            "created_at": feedback.created_at.isoformat() if feedback.created_at else None,
        }
        try:
            await self._redis.publish(RedisKeys.feedback_channel(), json.dumps(payload))
        except Exception as exc:  # pragma: no cover - defensive
            logger.warning("Failed to publish feedback event for job %s: %s", job_id, exc)
        return feedback

    async def list_feedback_for_miner(self, miner_id: str, limit: int = 50) -> List[Feedback]:
        stmt = (
            select(Feedback)
            .where(Feedback.miner_id == miner_id)
            .order_by(Feedback.created_at.desc())
            .limit(limit)
        )
        result = await self._session.execute(stmt)
        return list(result.scalars().all())

    async def list_feedback_for_job(self, job_id: str, limit: int = 50) -> List[Feedback]:
        stmt = (
            select(Feedback)
            .where(Feedback.job_id == job_id)
            .order_by(Feedback.created_at.desc())
            .limit(limit)
        )
        result = await self._session.execute(stmt)
        return list(result.scalars().all())
122
apps/pool-hub/src/poolhub/repositories/match_repository.py
Normal file
@ -0,0 +1,122 @@
from __future__ import annotations

import datetime as dt
import json
from typing import Iterable, List, Optional, Sequence
from uuid import UUID

from redis.asyncio import Redis
from sqlalchemy import Select, select
from sqlalchemy.ext.asyncio import AsyncSession

from ..models import MatchRequest, MatchResult
from ..storage.redis_keys import RedisKeys


class MatchRepository:
    """Handles match request logging, result persistence, and Redis fan-out."""

    def __init__(self, session: AsyncSession, redis: Redis) -> None:
        self._session = session
        self._redis = redis

    async def create_request(
        self,
        *,
        job_id: str,
        requirements: dict[str, object],
        hints: Optional[dict[str, object]] = None,
        top_k: int = 1,
        enqueue: bool = True,
    ) -> MatchRequest:
        request = MatchRequest(
            job_id=job_id,
            requirements=requirements,
            hints=hints or {},
            top_k=top_k,
            created_at=dt.datetime.utcnow(),
        )
        self._session.add(request)
        await self._session.flush()

        if enqueue:
            payload = {
                "request_id": str(request.id),
                "job_id": request.job_id,
                "requirements": request.requirements,
                "hints": request.hints,
                "top_k": request.top_k,
            }
            await self._redis.rpush(RedisKeys.match_requests(), json.dumps(payload))
        return request

    async def add_results(
        self,
        *,
        request_id: UUID,
        candidates: Sequence[dict[str, object]],
        publish: bool = True,
    ) -> List[MatchResult]:
        results: List[MatchResult] = []
        created_at = dt.datetime.utcnow()
        for candidate in candidates:
            result = MatchResult(
                request_id=request_id,
                miner_id=str(candidate.get("miner_id")),
                score=float(candidate.get("score", 0.0)),
                explain=candidate.get("explain"),
                eta_ms=candidate.get("eta_ms"),
                price=candidate.get("price"),
                created_at=created_at,
            )
            self._session.add(result)
            results.append(result)
        await self._session.flush()

        if publish:
            request = await self._session.get(MatchRequest, request_id)
            if request:
                redis_key = RedisKeys.match_results(request.job_id)
                await self._redis.delete(redis_key)
                if results:
                    payloads = [json.dumps(self._result_payload(result)) for result in results]
                    await self._redis.rpush(redis_key, *payloads)
                    await self._redis.expire(redis_key, 300)
                    channel = RedisKeys.match_results_channel(request.job_id)
                    for payload in payloads:
                        await self._redis.publish(channel, payload)
        return results

    async def get_request(self, request_id: UUID) -> Optional[MatchRequest]:
        return await self._session.get(MatchRequest, request_id)

    async def list_recent_requests(self, limit: int = 20) -> List[MatchRequest]:
        stmt: Select[MatchRequest] = (
            select(MatchRequest)
            .order_by(MatchRequest.created_at.desc())
            .limit(limit)
        )
        result = await self._session.execute(stmt)
        return list(result.scalars().all())

    async def list_results_for_job(self, job_id: str, limit: int = 10) -> List[MatchResult]:
        stmt: Select[MatchResult] = (
            select(MatchResult)
            .join(MatchRequest)
            .where(MatchRequest.job_id == job_id)
            .order_by(MatchResult.created_at.desc())
            .limit(limit)
        )
        result = await self._session.execute(stmt)
        return list(result.scalars().all())

    def _result_payload(self, result: MatchResult) -> dict[str, object]:
        return {
            "request_id": str(result.request_id),
            "miner_id": result.miner_id,
            "score": result.score,
            "explain": result.explain,
            "eta_ms": result.eta_ms,
            "price": result.price,
            "created_at": result.created_at.isoformat() if result.created_at else None,
        }
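Note: the intended round trip, condensed from the repository tests below; assumes an open AsyncSession, a Redis client, and an enclosing async context.

# Illustrative flow: log a request, persist candidates, read them back.
repo = MatchRepository(session, redis_client)  # session/redis_client assumed in scope
request = await repo.create_request(
    job_id="job-123",
    requirements={"min_vram_gb": 8},
    hints={"region": "eu"},
    top_k=2,
)
await repo.add_results(
    request_id=request.id,
    candidates=[{"miner_id": "miner-1", "score": 0.9, "explain": "fit"}],
)
await session.commit()
latest = await repo.list_results_for_job("job-123")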
181
apps/pool-hub/src/poolhub/repositories/miner_repository.py
Normal file
@ -0,0 +1,181 @@
from __future__ import annotations

import datetime as dt
from typing import List, Optional, Tuple

from redis.asyncio import Redis
from sqlalchemy import select, update
from sqlalchemy.ext.asyncio import AsyncSession

from ..models import Miner, MinerStatus
from ..settings import settings
from ..storage.redis_keys import RedisKeys


class MinerRepository:
    """Coordinates miner registry persistence across PostgreSQL and Redis."""

    def __init__(self, session: AsyncSession, redis: Redis) -> None:
        self._session = session
        self._redis = redis

    async def register_miner(
        self,
        miner_id: str,
        api_key_hash: str,
        *,
        addr: str,
        proto: str,
        gpu_vram_gb: float,
        gpu_name: Optional[str],
        cpu_cores: int,
        ram_gb: float,
        max_parallel: int,
        base_price: float,
        tags: dict[str, str],
        capabilities: list[str],
        region: Optional[str],
    ) -> Miner:
        miner = await self._session.get(Miner, miner_id)
        if miner is None:
            miner = Miner(
                miner_id=miner_id,
                api_key_hash=api_key_hash,
                addr=addr,
                proto=proto,
                gpu_vram_gb=gpu_vram_gb,
                gpu_name=gpu_name,
                cpu_cores=cpu_cores,
                ram_gb=ram_gb,
                max_parallel=max_parallel,
                base_price=base_price,
                tags=tags,
                capabilities=capabilities,
                region=region,
            )
            self._session.add(miner)
            status = MinerStatus(miner_id=miner_id)
            self._session.add(status)
        else:
            miner.addr = addr
            miner.proto = proto
            miner.gpu_vram_gb = gpu_vram_gb
            miner.gpu_name = gpu_name
            miner.cpu_cores = cpu_cores
            miner.ram_gb = ram_gb
            miner.max_parallel = max_parallel
            miner.base_price = base_price
            miner.tags = tags
            miner.capabilities = capabilities
            miner.region = region

        miner.last_seen_at = dt.datetime.utcnow()

        await self._session.flush()
        await self._sync_miner_to_redis(miner_id)
        return miner

    async def update_status(
        self,
        miner_id: str,
        *,
        queue_len: Optional[int] = None,
        busy: Optional[bool] = None,
        avg_latency_ms: Optional[int] = None,
        temp_c: Optional[int] = None,
        mem_free_gb: Optional[float] = None,
    ) -> None:
        stmt = (
            update(MinerStatus)
            .where(MinerStatus.miner_id == miner_id)
            .values(
                {
                    k: v
                    for k, v in {
                        "queue_len": queue_len,
                        "busy": busy,
                        "avg_latency_ms": avg_latency_ms,
                        "temp_c": temp_c,
                        "mem_free_gb": mem_free_gb,
                        "updated_at": dt.datetime.utcnow(),
                    }.items()
                    if v is not None
                }
            )
        )
        await self._session.execute(stmt)

        miner = await self._session.get(Miner, miner_id)
        if miner:
            miner.last_seen_at = dt.datetime.utcnow()
        await self._session.flush()
        await self._sync_miner_to_redis(miner_id)

    async def touch_heartbeat(self, miner_id: str) -> None:
        miner = await self._session.get(Miner, miner_id)
        if miner is None:
            return
        miner.last_seen_at = dt.datetime.utcnow()
        await self._session.flush()
        await self._sync_miner_to_redis(miner_id)

    async def get_miner(self, miner_id: str) -> Optional[Miner]:
        return await self._session.get(Miner, miner_id)

    async def iter_miners(self) -> List[Miner]:
        result = await self._session.execute(select(Miner))
        return list(result.scalars().all())

    async def get_status(self, miner_id: str) -> Optional[MinerStatus]:
        return await self._session.get(MinerStatus, miner_id)

    async def list_active_miners(self) -> List[Tuple[Miner, Optional[MinerStatus], float]]:
        stmt = select(Miner, MinerStatus).join(MinerStatus, MinerStatus.miner_id == Miner.miner_id, isouter=True)
        result = await self._session.execute(stmt)
        records: List[Tuple[Miner, Optional[MinerStatus], float]] = []
        for miner, status in result.all():
            score = self._compute_score(miner, status)
            records.append((miner, status, score))
        return records

    async def _sync_miner_to_redis(self, miner_id: str) -> None:
        miner = await self._session.get(Miner, miner_id)
        if miner is None:
            return
        status = await self._session.get(MinerStatus, miner_id)

        payload = {
            "miner_id": miner.miner_id,
            "addr": miner.addr,
            "proto": miner.proto,
            "region": miner.region or "",
            "gpu_vram_gb": str(miner.gpu_vram_gb),
            "ram_gb": str(miner.ram_gb),
            "max_parallel": str(miner.max_parallel),
            "base_price": str(miner.base_price),
            "trust_score": str(miner.trust_score),
            "queue_len": str(status.queue_len if status else 0),
            "busy": str(status.busy if status else False),
        }

        redis_key = RedisKeys.miner_hash(miner_id)
        await self._redis.hset(redis_key, mapping=payload)
        await self._redis.expire(redis_key, settings.session_ttl_seconds + settings.heartbeat_grace_seconds)

        score = self._compute_score(miner, status)
        ranking_key = RedisKeys.miner_rankings(miner.region)
        await self._redis.zadd(ranking_key, {miner_id: score})
        await self._redis.expire(ranking_key, settings.session_ttl_seconds + settings.heartbeat_grace_seconds)

    def _compute_score(self, miner: Miner, status: Optional[MinerStatus]) -> float:
        load_factor = 1.0
        if status and miner.max_parallel:
            utilization = min(status.queue_len / max(miner.max_parallel, 1), 1.0)
            load_factor = 1.0 - utilization
        price_factor = 1.0 if miner.base_price <= 0 else min(1.0, 1.0 / miner.base_price)
        trust_factor = max(miner.trust_score, 0.0)
        return (
            (settings.default_score_weights.capability * 1.0)
            + (settings.default_score_weights.price * price_factor)
            + (settings.default_score_weights.load * load_factor)
            + (settings.default_score_weights.trust * trust_factor)
        )
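Note: a worked evaluation of _compute_score under the default ScoreWeights defined in settings below (cap 0.40, price 0.20, load 0.05, trust 0.15; the latency weight exists in settings but is not used by this method).

# Example inputs: base_price=0.8, queue_len=1, max_parallel=4, trust_score=0.5
# price_factor = min(1.0, 1 / 0.8) = 1.0
# utilization  = min(1 / 4, 1.0) = 0.25, so load_factor = 0.75
score = 0.40 * 1.0 + 0.20 * 1.0 + 0.05 * 0.75 + 0.15 * 0.5
assert abs(score - 0.7125) < 1e-9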
59
apps/pool-hub/src/poolhub/settings.py
Normal file
@ -0,0 +1,59 @@
from __future__ import annotations

from functools import lru_cache
from typing import Any, Dict, List

from pydantic import AnyHttpUrl, BaseModel, Field
from pydantic_settings import BaseSettings, SettingsConfigDict


class ScoreWeights(BaseModel):
    capability: float = Field(default=0.40, alias="cap")
    price: float = Field(default=0.20)
    latency: float = Field(default=0.20)
    trust: float = Field(default=0.15)
    load: float = Field(default=0.05)

    model_config = SettingsConfigDict(populate_by_name=True)

    def as_vector(self) -> List[float]:
        return [self.capability, self.price, self.latency, self.trust, self.load]


class Settings(BaseSettings):
    model_config = SettingsConfigDict(env_prefix="poolhub_", env_file=".env", case_sensitive=False)

    app_name: str = "AITBC Pool Hub"
    bind_host: str = Field(default="127.0.0.1")
    bind_port: int = Field(default=8203)

    coordinator_shared_secret: str = Field(default="changeme")

    postgres_dsn: str = Field(default="postgresql+asyncpg://poolhub:poolhub@127.0.0.1:5432/aitbc")
    postgres_pool_min: int = Field(default=1)
    postgres_pool_max: int = Field(default=10)

    redis_url: str = Field(default="redis://127.0.0.1:6379/4")
    redis_max_connections: int = Field(default=32)

    session_ttl_seconds: int = Field(default=60)
    heartbeat_grace_seconds: int = Field(default=120)

    default_score_weights: ScoreWeights = Field(default_factory=ScoreWeights)

    allowed_origins: List[AnyHttpUrl] = Field(default_factory=list)

    prometheus_namespace: str = Field(default="poolhub")

    def asgi_kwargs(self) -> Dict[str, Any]:
        return {
            "title": self.app_name,
        }


@lru_cache(maxsize=1)
def get_settings() -> Settings:
    return Settings()


settings = get_settings()
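Note: because Settings reads an .env file with the poolhub_ prefix (case-insensitive), local configuration might look like the sketch below; values are illustrative.

# .env
poolhub_bind_port=8203
poolhub_coordinator_shared_secret=replace-me
poolhub_postgres_dsn=postgresql+asyncpg://poolhub:poolhub@127.0.0.1:5432/aitbc
poolhub_redis_url=redis://127.0.0.1:6379/4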
5
apps/pool-hub/src/poolhub/storage/__init__.py
Normal file
@ -0,0 +1,5 @@
"""Storage utilities for the Pool Hub service."""

from .redis_keys import RedisKeys

__all__ = ["RedisKeys"]
40
apps/pool-hub/src/poolhub/storage/redis_keys.py
Normal file
@ -0,0 +1,40 @@
from __future__ import annotations

from typing import Final


class RedisKeys:
    namespace: Final[str] = "poolhub"

    @classmethod
    def miner_hash(cls, miner_id: str) -> str:
        return f"{cls.namespace}:miner:{miner_id}"

    @classmethod
    def miner_rankings(cls, region: str | None = None) -> str:
        suffix = region or "global"
        return f"{cls.namespace}:rankings:{suffix}"

    @classmethod
    def miner_session(cls, session_token: str) -> str:
        return f"{cls.namespace}:session:{session_token}"

    @classmethod
    def heartbeat_stream(cls) -> str:
        return f"{cls.namespace}:heartbeat-stream"

    @classmethod
    def match_requests(cls) -> str:
        return f"{cls.namespace}:match-requests"

    @classmethod
    def match_results(cls, job_id: str) -> str:
        return f"{cls.namespace}:match-results:{job_id}"

    @classmethod
    def feedback_channel(cls) -> str:
        return f"{cls.namespace}:events:feedback"

    @classmethod
    def match_results_channel(cls, job_id: str) -> str:
        return f"{cls.namespace}:events:match-results:{job_id}"
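Note: the resulting key shapes, derived directly from the helpers above.

assert RedisKeys.miner_hash("miner-1") == "poolhub:miner:miner-1"
assert RedisKeys.miner_rankings() == "poolhub:rankings:global"
assert RedisKeys.miner_rankings("eu") == "poolhub:rankings:eu"
assert RedisKeys.match_results("job-123") == "poolhub:match-results:job-123"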
63
apps/pool-hub/tests/conftest.py
Normal file
@ -0,0 +1,63 @@
from __future__ import annotations

import os
import sys
from pathlib import Path

import pytest
import pytest_asyncio
from redis.asyncio import Redis
from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine

BASE_DIR = Path(__file__).resolve().parents[2]
POOLHUB_SRC = BASE_DIR / "pool-hub" / "src"
if str(POOLHUB_SRC) not in sys.path:
    sys.path.insert(0, str(POOLHUB_SRC))

from poolhub.models import Base


def _get_required_env(name: str) -> str:
    value = os.getenv(name)
    if not value:
        pytest.skip(f"Set {name} to run Pool Hub integration tests")
    return value


@pytest_asyncio.fixture()
async def db_engine() -> AsyncEngine:
    dsn = _get_required_env("POOLHUB_TEST_POSTGRES_DSN")
    engine = create_async_engine(dsn, pool_pre_ping=True)

    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.drop_all)
        await conn.run_sync(Base.metadata.create_all)

    yield engine

    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.drop_all)
    await engine.dispose()


@pytest_asyncio.fixture
async def db_session(db_engine: AsyncEngine) -> AsyncSession:
    session_factory = async_sessionmaker(db_engine, expire_on_commit=False, autoflush=False)
    async with session_factory() as session:
        yield session
        await session.rollback()


@pytest_asyncio.fixture()
async def redis_client() -> Redis:
    redis_url = _get_required_env("POOLHUB_TEST_REDIS_URL")
    client = Redis.from_url(redis_url, encoding="utf-8", decode_responses=True)
    await client.flushdb()
    yield client
    await client.flushdb()
    await client.close()


@pytest_asyncio.fixture(autouse=True)
async def _clear_redis(redis_client: Redis) -> None:
    await redis_client.flushdb()
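Note: the fixtures skip unless both variables are set, so a local run might look like the following command; the DSN, database name, and test path are illustrative.

POOLHUB_TEST_POSTGRES_DSN=postgresql+asyncpg://poolhub:poolhub@127.0.0.1:5432/aitbc_test \
POOLHUB_TEST_REDIS_URL=redis://127.0.0.1:6379/5 \
pytest apps/pool-hub/tests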
153
apps/pool-hub/tests/test_api.py
Normal file
@ -0,0 +1,153 @@
from __future__ import annotations

import uuid

import pytest
import pytest_asyncio
from httpx import AsyncClient
from sqlalchemy.ext.asyncio import async_sessionmaker

from poolhub.app import deps
from poolhub.app.main import create_app
from poolhub.app.prometheus import reset_metrics
from poolhub.repositories.miner_repository import MinerRepository


@pytest_asyncio.fixture()
async def async_client(db_engine, redis_client):  # noqa: F811
    async def _session_override():
        factory = async_sessionmaker(db_engine, expire_on_commit=False, autoflush=False)
        async with factory() as session:
            yield session

    async def _redis_override():
        yield redis_client

    app = create_app()
    app.dependency_overrides.clear()
    app.dependency_overrides[deps.db_session_dep] = _session_override
    app.dependency_overrides[deps.redis_dep] = _redis_override
    reset_metrics()

    async with AsyncClient(app=app, base_url="http://testserver") as client:
        yield client

    app.dependency_overrides.clear()


@pytest.mark.asyncio
async def test_match_endpoint(async_client, db_session, redis_client):  # noqa: F811
    repo = MinerRepository(db_session, redis_client)
    await repo.register_miner(
        miner_id="miner-1",
        api_key_hash="hash",
        addr="127.0.0.1",
        proto="grpc",
        gpu_vram_gb=16,
        gpu_name="A100",
        cpu_cores=32,
        ram_gb=128,
        max_parallel=4,
        base_price=0.8,
        tags={"tier": "gold"},
        capabilities=["embedding"],
        region="eu",
    )
    await db_session.commit()

    response = await async_client.post(
        "/v1/match",
        json={
            "job_id": "job-123",
            "requirements": {"min_vram_gb": 8},
            "hints": {"region": "eu"},
            "top_k": 1,
        },
    )
    assert response.status_code == 200
    payload = response.json()
    assert payload["job_id"] == "job-123"
    assert len(payload["candidates"]) == 1


@pytest.mark.asyncio
async def test_match_endpoint_no_miners(async_client):
    response = await async_client.post(
        "/v1/match",
        json={"job_id": "empty", "requirements": {}, "hints": {}, "top_k": 2},
    )
    assert response.status_code == 200
    payload = response.json()
    assert payload["candidates"] == []


@pytest.mark.asyncio
async def test_health_endpoint(async_client):  # noqa: F811
    response = await async_client.get("/v1/health")
    assert response.status_code == 200
    data = response.json()
    assert data["status"] in {"ok", "degraded"}
    assert "db_error" in data
    assert "redis_error" in data


@pytest.mark.asyncio
async def test_health_endpoint_degraded(db_engine, redis_client):  # noqa: F811
    async def _session_override():
        factory = async_sessionmaker(db_engine, expire_on_commit=False, autoflush=False)
        async with factory() as session:
            yield session

    class FailingRedis:
        async def ping(self) -> None:
            raise RuntimeError("redis down")

        def __getattr__(self, _: str) -> None:  # pragma: no cover - minimal stub
            raise RuntimeError("redis down")

    async def _redis_override():
        yield FailingRedis()

    app = create_app()
    app.dependency_overrides.clear()
    app.dependency_overrides[deps.db_session_dep] = _session_override
    app.dependency_overrides[deps.redis_dep] = _redis_override
    reset_metrics()

    async with AsyncClient(app=app, base_url="http://testserver") as client:
        response = await client.get("/v1/health")
        assert response.status_code == 200
        payload = response.json()
        assert payload["status"] == "degraded"
        assert payload["redis_error"]
        assert payload["db_error"] is None

    app.dependency_overrides.clear()


@pytest.mark.asyncio
async def test_metrics_endpoint(async_client):
    baseline = await async_client.get("/metrics")
    before = _extract_counter(baseline.text, "poolhub_match_requests_total")

    for _ in range(2):
        await async_client.post(
            "/v1/match",
            json={"job_id": str(uuid.uuid4()), "requirements": {}, "hints": {}, "top_k": 1},
        )

    updated = await async_client.get("/metrics")
    after = _extract_counter(updated.text, "poolhub_match_requests_total")
    assert after >= before + 2


def _extract_counter(metrics_text: str, metric: str) -> float:
    for line in metrics_text.splitlines():
        if line.startswith(metric):
            parts = line.split()
            if len(parts) >= 2:
                try:
                    return float(parts[1])
                except ValueError:  # pragma: no cover
                    return 0.0
    return 0.0
96
apps/pool-hub/tests/test_repositories.py
Normal file
@ -0,0 +1,96 @@
from __future__ import annotations

import json
import uuid

import pytest

from poolhub.repositories.feedback_repository import FeedbackRepository
from poolhub.repositories.match_repository import MatchRepository
from poolhub.repositories.miner_repository import MinerRepository
from poolhub.storage.redis_keys import RedisKeys


@pytest.mark.asyncio
async def test_register_miner_persists_and_syncs(db_session, redis_client):
    repo = MinerRepository(db_session, redis_client)

    await repo.register_miner(
        miner_id="miner-1",
        api_key_hash="hash",
        addr="127.0.0.1",
        proto="grpc",
        gpu_vram_gb=16,
        gpu_name="A100",
        cpu_cores=32,
        ram_gb=128,
        max_parallel=4,
        base_price=0.8,
        tags={"tier": "gold"},
        capabilities=["embedding"],
        region="eu",
    )

    miner = await repo.get_miner("miner-1")
    assert miner is not None
    assert miner.addr == "127.0.0.1"

    redis_hash = await redis_client.hgetall(RedisKeys.miner_hash("miner-1"))
    assert redis_hash["miner_id"] == "miner-1"
    ranking = await redis_client.zscore(RedisKeys.miner_rankings("eu"), "miner-1")
    assert ranking is not None


@pytest.mark.asyncio
async def test_match_request_flow(db_session, redis_client):
    match_repo = MatchRepository(db_session, redis_client)

    req = await match_repo.create_request(
        job_id="job-123",
        requirements={"min_vram_gb": 8},
        hints={"region": "eu"},
        top_k=2,
    )
    await db_session.commit()

    queue_entry = await redis_client.lpop(RedisKeys.match_requests())
    assert queue_entry is not None
    payload = json.loads(queue_entry)
    assert payload["job_id"] == "job-123"

    await match_repo.add_results(
        request_id=req.id,
        candidates=[
            {"miner_id": "miner-1", "score": 0.9, "explain": "fit"},
            {"miner_id": "miner-2", "score": 0.8, "explain": "backup"},
        ],
    )
    await db_session.commit()

    results = await match_repo.list_results_for_job("job-123")
    assert len(results) == 2

    redis_results = await redis_client.lrange(RedisKeys.match_results("job-123"), 0, -1)
    assert len(redis_results) == 2


@pytest.mark.asyncio
async def test_feedback_repository(db_session, redis_client):
    feedback_repo = FeedbackRepository(db_session, redis_client)

    feedback = await feedback_repo.add_feedback(
        job_id="job-321",
        miner_id="miner-1",
        outcome="completed",
        latency_ms=1200,
        tokens_spent=1.5,
    )
    await db_session.commit()

    rows = await feedback_repo.list_feedback_for_job("job-321")
    assert len(rows) == 1
    assert rows[0].outcome == "completed"

    # Redis does not buffer publishes for inspection, so this is a best-effort
    # smoke check that the repository returns the persisted object.
    assert feedback.miner_id == "miner-1"
@ -1,11 +1,14 @@
from __future__ import annotations

import base64
from typing import Any, Dict, Optional

from fastapi import APIRouter, Depends

from .deps import get_receipt_service
from .deps import get_receipt_service, get_keystore, get_ledger
from .models import ReceiptVerificationModel, from_validation_result
from .keystore.service import KeystoreService
from .ledger_mock import SQLiteLedgerAdapter
from .receipts.service import ReceiptVerifierService

router = APIRouter(tags=["jsonrpc"])
@ -24,6 +27,8 @@ def handle_jsonrpc(
def handle_jsonrpc(
    request: Dict[str, Any],
    service: ReceiptVerifierService = Depends(get_receipt_service),
    keystore: KeystoreService = Depends(get_keystore),
    ledger: SQLiteLedgerAdapter = Depends(get_ledger),
) -> Dict[str, Any]:
    method = request.get("method")
    params = request.get("params") or {}
@ -46,4 +51,68 @@ def handle_jsonrpc(
        results = [from_validation_result(item).model_dump() for item in service.verify_history(str(job_id))]
        return _response(result={"items": results}, request_id=request_id)

    if method == "wallet.list":
        items = []
        for record in keystore.list_records():
            ledger_record = ledger.get_wallet(record.wallet_id)
            metadata = ledger_record.metadata if ledger_record else record.metadata
            items.append({"wallet_id": record.wallet_id, "public_key": record.public_key, "metadata": metadata})
        return _response(result={"items": items}, request_id=request_id)

    if method == "wallet.create":
        wallet_id = params.get("wallet_id")
        password = params.get("password")
        metadata = params.get("metadata") or {}
        secret_b64 = params.get("secret_key")
        if not wallet_id or not password:
            return _response(error={"code": -32602, "message": "wallet_id and password required"}, request_id=request_id)
        secret = base64.b64decode(secret_b64) if secret_b64 else None
        record = keystore.create_wallet(wallet_id=wallet_id, password=password, secret=secret, metadata=metadata)
        ledger.upsert_wallet(record.wallet_id, record.public_key, record.metadata)
        ledger.record_event(record.wallet_id, "created", {"metadata": record.metadata})
        return _response(
            result={
                "wallet": {
                    "wallet_id": record.wallet_id,
                    "public_key": record.public_key,
                    "metadata": record.metadata,
                }
            },
            request_id=request_id,
        )

    if method == "wallet.unlock":
        wallet_id = params.get("wallet_id")
        password = params.get("password")
        if not wallet_id or not password:
            return _response(error={"code": -32602, "message": "wallet_id and password required"}, request_id=request_id)
        try:
            keystore.unlock_wallet(wallet_id, password)
            ledger.record_event(wallet_id, "unlocked", {"success": True})
            return _response(result={"wallet_id": wallet_id, "unlocked": True}, request_id=request_id)
        except (KeyError, ValueError):
            ledger.record_event(wallet_id, "unlocked", {"success": False})
            return _response(error={"code": -32001, "message": "invalid credentials"}, request_id=request_id)

    if method == "wallet.sign":
        wallet_id = params.get("wallet_id")
        password = params.get("password")
        message_b64 = params.get("message")
        if not wallet_id or not password or not message_b64:
            return _response(error={"code": -32602, "message": "wallet_id, password, message required"}, request_id=request_id)
        try:
            message = base64.b64decode(message_b64)
        except Exception:
            return _response(error={"code": -32602, "message": "invalid base64 message"}, request_id=request_id)

        try:
            signature = keystore.sign_message(wallet_id, password, message)
            ledger.record_event(wallet_id, "sign", {"success": True})
        except (KeyError, ValueError):
            ledger.record_event(wallet_id, "sign", {"success": False})
            return _response(error={"code": -32001, "message": "invalid credentials"}, request_id=request_id)

        signature_b64 = base64.b64encode(signature).decode()
        return _response(result={"wallet_id": wallet_id, "signature": signature_b64}, request_id=request_id)

    return _response(error={"code": -32601, "message": "Method not found"}, request_id=request_id)
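Note: a plausible wallet.sign exchange against this handler; the params follow the code above, while the envelope fields (jsonrpc, id) and the exact result wrapper depend on _response, which is not shown in this hunk.

# Illustrative JSON-RPC request body for wallet.sign:
request = {
    "jsonrpc": "2.0",  # assumed envelope field
    "id": 1,
    "method": "wallet.sign",
    "params": {
        "wallet_id": "w1",
        "password": "correct horse battery staple",  # must satisfy the keystore's password rules
        "message": "aGVsbG8=",  # base64 of b"hello"
    },
}
# Expected success result (shape inferred from the handler):
# {"wallet_id": "w1", "signature": "<base64 Ed25519 signature>"}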
@ -1,18 +1,52 @@
from __future__ import annotations

import base64
import logging
from typing import Optional  # needed by the _rate_key/_enforce_limit helpers below

from fastapi import APIRouter, Depends, HTTPException, status
from fastapi import APIRouter, Depends, HTTPException, status, Request

from .deps import get_receipt_service
from .deps import get_receipt_service, get_keystore, get_ledger
from .models import (
    ReceiptVerificationListResponse,
    ReceiptVerificationModel,
    ReceiptVerifyResponse,
    SignatureValidationModel,
    WalletCreateRequest,
    WalletCreateResponse,
    WalletListResponse,
    WalletUnlockRequest,
    WalletUnlockResponse,
    WalletSignRequest,
    WalletSignResponse,
    WalletDescriptor,
    from_validation_result,
)
from .keystore.service import KeystoreService
from .ledger_mock import SQLiteLedgerAdapter
from .receipts.service import ReceiptValidationResult, ReceiptVerifierService
from .security import RateLimiter, wipe_buffer

router = APIRouter(prefix="/v1", tags=["receipts"])
router = APIRouter(prefix="/v1", tags=["wallets", "receipts"])
logger = logging.getLogger(__name__)
_rate_limiter = RateLimiter(max_requests=30, window_seconds=60)


def _rate_key(action: str, request: Request, wallet_id: Optional[str] = None) -> str:
    host = request.client.host if request.client else "unknown"
    parts = [action, host]
    if wallet_id:
        parts.append(wallet_id)
    return ":".join(parts)


def _enforce_limit(action: str, request: Request, wallet_id: Optional[str] = None) -> None:
    key = _rate_key(action, request, wallet_id)
    if not _rate_limiter.allow(key):
        raise HTTPException(status_code=status.HTTP_429_TOO_MANY_REQUESTS, detail="rate limit exceeded")


def _result_to_response(result: ReceiptValidationResult) -> ReceiptVerifyResponse:
@ -47,3 +81,97 @@ def verify_receipt_history(
    results = service.verify_history(job_id)
    items = [from_validation_result(result) for result in results]
    return ReceiptVerificationListResponse(items=items)


@router.get("/wallets", response_model=WalletListResponse, summary="List wallets")
def list_wallets(
    keystore: KeystoreService = Depends(get_keystore),
    ledger: SQLiteLedgerAdapter = Depends(get_ledger),
) -> WalletListResponse:
    descriptors = []
    for record in keystore.list_records():
        ledger_record = ledger.get_wallet(record.wallet_id)
        metadata = ledger_record.metadata if ledger_record else record.metadata
        descriptors.append(
            WalletDescriptor(wallet_id=record.wallet_id, public_key=record.public_key, metadata=metadata)
        )
    return WalletListResponse(items=descriptors)


@router.post("/wallets", response_model=WalletCreateResponse, status_code=status.HTTP_201_CREATED, summary="Create wallet")
def create_wallet(
    request: WalletCreateRequest,
    http_request: Request,
    keystore: KeystoreService = Depends(get_keystore),
    ledger: SQLiteLedgerAdapter = Depends(get_ledger),
) -> WalletCreateResponse:
    _enforce_limit("wallet-create", http_request)

    try:
        secret = base64.b64decode(request.secret_key) if request.secret_key else None
    except Exception as exc:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="invalid base64 secret") from exc

    try:
        record = keystore.create_wallet(
            wallet_id=request.wallet_id,
            password=request.password,
            secret=secret,
            metadata=request.metadata,
        )
    except ValueError as exc:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc

    ledger.upsert_wallet(record.wallet_id, record.public_key, record.metadata)
    ledger.record_event(record.wallet_id, "created", {"metadata": record.metadata})
    logger.info("Created wallet", extra={"wallet_id": record.wallet_id})
    wallet = WalletDescriptor(wallet_id=record.wallet_id, public_key=record.public_key, metadata=record.metadata)
    return WalletCreateResponse(wallet=wallet)


@router.post("/wallets/{wallet_id}/unlock", response_model=WalletUnlockResponse, summary="Unlock wallet")
def unlock_wallet(
    wallet_id: str,
    request: WalletUnlockRequest,
    http_request: Request,
    keystore: KeystoreService = Depends(get_keystore),
    ledger: SQLiteLedgerAdapter = Depends(get_ledger),
) -> WalletUnlockResponse:
    _enforce_limit("wallet-unlock", http_request, wallet_id)
    try:
        secret = bytearray(keystore.unlock_wallet(wallet_id, request.password))
        ledger.record_event(wallet_id, "unlocked", {"success": True})
        logger.info("Unlocked wallet", extra={"wallet_id": wallet_id})
    except (KeyError, ValueError):
        ledger.record_event(wallet_id, "unlocked", {"success": False})
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid credentials")
    finally:
        if "secret" in locals():
            wipe_buffer(secret)
    # The secret is never exposed in the response.
    return WalletUnlockResponse(wallet_id=wallet_id, unlocked=True)


@router.post("/wallets/{wallet_id}/sign", response_model=WalletSignResponse, summary="Sign payload")
def sign_payload(
    wallet_id: str,
    request: WalletSignRequest,
    http_request: Request,
    keystore: KeystoreService = Depends(get_keystore),
    ledger: SQLiteLedgerAdapter = Depends(get_ledger),
) -> WalletSignResponse:
    _enforce_limit("wallet-sign", http_request, wallet_id)
    try:
        message = base64.b64decode(request.message_base64)
    except Exception as exc:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="invalid base64 message") from exc

    try:
        signature = keystore.sign_message(wallet_id, request.password, message)
        ledger.record_event(wallet_id, "sign", {"success": True})
        logger.debug("Signed payload", extra={"wallet_id": wallet_id})
    except (KeyError, ValueError):
        ledger.record_event(wallet_id, "sign", {"success": False})
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="invalid credentials")

    signature_b64 = base64.b64encode(signature).decode()
    return WalletSignResponse(wallet_id=wallet_id, signature_base64=signature_b64)
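Note: an illustrative client call against the new wallet endpoints; the daemon's bind address is not shown in this diff, so the host and port below are assumptions.

import base64
import httpx

BASE = "http://127.0.0.1:8000"  # assumed bind address
resp = httpx.post(
    f"{BASE}/v1/wallets",
    json={"wallet_id": "w1", "password": "correct horse battery staple", "metadata": {"label": "demo"}},
)
resp.raise_for_status()
print(resp.json()["wallet"]["public_key"])  # hex-encoded Ed25519 verify key

message = base64.b64encode(b"hello").decode()
signed = httpx.post(
    f"{BASE}/v1/wallets/w1/sign",
    json={"password": "correct horse battery staple", "message_base64": message},
)
print(signed.json()["signature_base64"])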
@ -9,6 +9,8 @@ from nacl.bindings import (
    crypto_aead_xchacha20poly1305_ietf_encrypt,
)

from ..security import wipe_buffer


class EncryptionError(Exception):
    """Raised when encryption or decryption fails."""
@ -50,13 +52,15 @@ class EncryptionSuite:
        raise EncryptionError("encryption failed") from exc

    def decrypt(self, *, password: str, ciphertext: bytes, salt: bytes, nonce: bytes) -> bytes:
        key = self._derive_key(password=password, salt=salt)
        key_bytes = bytearray(self._derive_key(password=password, salt=salt))
        try:
            return crypto_aead_xchacha20poly1305_ietf_decrypt(
                ciphertext=ciphertext,
                aad=b"",
                nonce=nonce,
                key=key,
                key=bytes(key_bytes),
            )
        except Exception as exc:
            raise EncryptionError("decryption failed") from exc
        finally:
            wipe_buffer(key_bytes)
@ -5,6 +5,7 @@ from functools import lru_cache
from fastapi import Depends

from .keystore.service import KeystoreService
from .ledger_mock import SQLiteLedgerAdapter
from .receipts.service import ReceiptVerifierService
from .settings import Settings, settings
@ -24,3 +25,8 @@ def get_receipt_service(config: Settings = Depends(get_settings)) -> ReceiptVeri
@lru_cache
def get_keystore() -> KeystoreService:
    return KeystoreService()


@lru_cache
def get_ledger(config: Settings = Depends(get_settings)) -> SQLiteLedgerAdapter:
    return SQLiteLedgerAdapter(config.ledger_db_path)
@ -1,16 +1,20 @@
from __future__ import annotations

from dataclasses import dataclass
from typing import Dict, List, Optional
from typing import Dict, Iterable, List, Optional

from secrets import token_bytes

from nacl.signing import SigningKey

from ..crypto.encryption import EncryptionSuite, EncryptionError
from ..security import validate_password_rules, wipe_buffer


@dataclass
class WalletRecord:
    wallet_id: str
    public_key: str
    salt: bytes
    nonce: bytes
    ciphertext: bytes
@ -27,14 +31,46 @@ class KeystoreService:
    def list_wallets(self) -> List[str]:
        return list(self._wallets.keys())

    def list_records(self) -> Iterable[WalletRecord]:
        return list(self._wallets.values())

    def get_wallet(self, wallet_id: str) -> Optional[WalletRecord]:
        return self._wallets.get(wallet_id)

    def create_wallet(self, wallet_id: str, password: str, plaintext: bytes, metadata: Optional[Dict[str, str]] = None) -> WalletRecord:
    def create_wallet(
        self,
        wallet_id: str,
        password: str,
        secret: Optional[bytes] = None,
        metadata: Optional[Dict[str, str]] = None,
    ) -> WalletRecord:
        if wallet_id in self._wallets:
            raise ValueError("wallet already exists")

        validate_password_rules(password)

        metadata_map = {str(k): str(v) for k, v in (metadata or {}).items()}

        if secret is None:
            signing_key = SigningKey.generate()
            secret_bytes = signing_key.encode()
        else:
            if len(secret) != SigningKey.seed_size:
                raise ValueError("secret key must be 32 bytes")
            secret_bytes = secret
            signing_key = SigningKey(secret_bytes)

        salt = token_bytes(self._encryption.salt_bytes)
        nonce = token_bytes(self._encryption.nonce_bytes)
        ciphertext = self._encryption.encrypt(password=password, plaintext=plaintext, salt=salt, nonce=nonce)
        ciphertext = self._encryption.encrypt(password=password, plaintext=secret_bytes, salt=salt, nonce=nonce)
        record = WalletRecord(wallet_id=wallet_id, salt=salt, nonce=nonce, ciphertext=ciphertext, metadata=metadata or {})
        record = WalletRecord(
            wallet_id=wallet_id,
            public_key=signing_key.verify_key.encode().hex(),
            salt=salt,
            nonce=nonce,
            ciphertext=ciphertext,
            metadata=metadata_map,
        )
        self._wallets[wallet_id] = record
        return record
@ -49,3 +85,12 @@ class KeystoreService:

    def delete_wallet(self, wallet_id: str) -> bool:
        return self._wallets.pop(wallet_id, None) is not None

    def sign_message(self, wallet_id: str, password: str, message: bytes) -> bytes:
        secret_bytes = bytearray(self.unlock_wallet(wallet_id, password))
        try:
            signing_key = SigningKey(bytes(secret_bytes))
            signed = signing_key.sign(message)
            return signed.signature
        finally:
            wipe_buffer(secret_bytes)
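Note: a minimal end-to-end sketch of the updated keystore; KeystoreService() construction mirrors get_keystore() in deps.py, and the password is assumed to satisfy validate_password_rules.

from nacl.signing import VerifyKey

ks = KeystoreService()
record = ks.create_wallet(wallet_id="w1", password="correct horse battery staple")
sig = ks.sign_message("w1", "correct horse battery staple", b"hello")
VerifyKey(bytes.fromhex(record.public_key)).verify(b"hello", sig)  # raises on a bad signature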
3
apps/wallet-daemon/src/app/ledger_mock/__init__.py
Normal file
@ -0,0 +1,3 @@
from .sqlite_adapter import SQLiteLedgerAdapter, WalletRecord, WalletEvent

__all__ = ["SQLiteLedgerAdapter", "WalletRecord", "WalletEvent"]
106
apps/wallet-daemon/src/app/ledger_mock/sqlite_adapter.py
Normal file
@ -0,0 +1,106 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import sqlite3
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Iterable, Optional
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class WalletRecord:
|
||||||
|
wallet_id: str
|
||||||
|
public_key: str
|
||||||
|
metadata: dict
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class WalletEvent:
|
||||||
|
wallet_id: str
|
||||||
|
event_type: str
|
||||||
|
payload: dict
|
||||||
|
|
||||||
|
|
||||||
|
class SQLiteLedgerAdapter:
|
||||||
|
def __init__(self, db_path: Path) -> None:
|
||||||
|
self._db_path = db_path
|
||||||
|
self._ensure_schema()
|
||||||
|
|
||||||
|
def _connect(self) -> sqlite3.Connection:
|
||||||
|
conn = sqlite3.connect(self._db_path)
|
||||||
|
conn.row_factory = sqlite3.Row
|
||||||
|
return conn
|
||||||
|
|
||||||
|
def _ensure_schema(self) -> None:
|
||||||
|
self._db_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
with self._connect() as conn:
|
||||||
|
conn.execute(
|
||||||
|
"""
|
||||||
|
CREATE TABLE IF NOT EXISTS wallets (
|
||||||
|
wallet_id TEXT PRIMARY KEY,
|
||||||
|
public_key TEXT NOT NULL,
|
||||||
|
metadata TEXT NOT NULL
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
conn.execute(
|
||||||
|
"""
|
||||||
|
CREATE TABLE IF NOT EXISTS wallet_events (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
wallet_id TEXT NOT NULL,
|
||||||
|
event_type TEXT NOT NULL,
|
||||||
|
payload TEXT NOT NULL,
|
||||||
|
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
FOREIGN KEY(wallet_id) REFERENCES wallets(wallet_id)
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
def upsert_wallet(self, wallet_id: str, public_key: str, metadata: dict) -> None:
|
||||||
|
payload = json.dumps(metadata)
|
||||||
|
with self._connect() as conn:
|
||||||
|
conn.execute(
|
||||||
|
"""
|
||||||
|
INSERT INTO wallets(wallet_id, public_key, metadata)
|
||||||
|
VALUES (?, ?, ?)
|
||||||
|
ON CONFLICT(wallet_id) DO UPDATE SET public_key=excluded.public_key, metadata=excluded.metadata
|
||||||
|
""",
|
||||||
|
(wallet_id, public_key, payload),
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_wallet(self, wallet_id: str) -> Optional[WalletRecord]:
|
||||||
|
with self._connect() as conn:
|
||||||
|
row = conn.execute(
|
||||||
|
"SELECT wallet_id, public_key, metadata FROM wallets WHERE wallet_id = ?",
|
||||||
|
(wallet_id,),
|
||||||
|
).fetchone()
|
||||||
|
if row is None:
|
||||||
|
return None
|
||||||
|
return WalletRecord(wallet_id=row["wallet_id"], public_key=row["public_key"], metadata=json.loads(row["metadata"]))
|
||||||
|
|
||||||
|
def list_wallets(self) -> Iterable[WalletRecord]:
|
||||||
|
with self._connect() as conn:
|
||||||
|
rows = conn.execute("SELECT wallet_id, public_key, metadata FROM wallets").fetchall()
|
||||||
|
for row in rows:
|
||||||
|
yield WalletRecord(wallet_id=row["wallet_id"], public_key=row["public_key"], metadata=json.loads(row["metadata"]))
|
||||||
|
|
||||||
|
def record_event(self, wallet_id: str, event_type: str, payload: dict) -> None:
|
||||||
|
data = json.dumps(payload)
|
||||||
|
with self._connect() as conn:
|
||||||
|
conn.execute(
|
||||||
|
"INSERT INTO wallet_events(wallet_id, event_type, payload) VALUES (?, ?, ?)",
|
||||||
|
(wallet_id, event_type, data),
|
||||||
|
)
|
||||||
|
|
||||||
|
def list_events(self, wallet_id: str) -> Iterable[WalletEvent]:
|
||||||
|
with self._connect() as conn:
|
||||||
|
rows = conn.execute(
|
||||||
|
"SELECT wallet_id, event_type, payload FROM wallet_events WHERE wallet_id = ? ORDER BY id",
|
||||||
|
(wallet_id,),
|
||||||
|
).fetchall()
|
||||||
|
for row in rows:
|
||||||
|
yield WalletEvent(
|
||||||
|
wallet_id=row["wallet_id"],
|
||||||
|
event_type=row["event_type"],
|
||||||
|
payload=json.loads(row["payload"]),
|
||||||
|
)
|
||||||
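For orientation, a quick usage sketch of the adapter above (the path is a placeholder):

```python
from pathlib import Path

adapter = SQLiteLedgerAdapter(Path("./data/wallet_ledger.db"))
adapter.upsert_wallet("wallet-1", "base64-pubkey", {"label": "primary"})
adapter.record_event("wallet-1", "created", {"step": 1})

# Events come back in insertion order (ORDER BY id).
for event in adapter.list_events("wallet-1"):
    print(event.event_type, event.payload)
```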
@@ -1,6 +1,6 @@
 from __future__ import annotations

-from typing import List
+from typing import Any, Dict, List, Optional

 from aitbc_sdk import SignatureValidation

@@ -43,3 +43,43 @@ def from_validation_result(result) -> ReceiptVerificationModel:

 class ReceiptVerificationListResponse(BaseModel):
     items: List[ReceiptVerificationModel]
+
+
+class WalletDescriptor(BaseModel):
+    wallet_id: str
+    public_key: str
+    metadata: Dict[str, Any]
+
+
+class WalletListResponse(BaseModel):
+    items: List[WalletDescriptor]
+
+
+class WalletCreateRequest(BaseModel):
+    wallet_id: str
+    password: str
+    metadata: Dict[str, Any] = {}
+    secret_key: Optional[str] = None
+
+
+class WalletCreateResponse(BaseModel):
+    wallet: WalletDescriptor
+
+
+class WalletUnlockRequest(BaseModel):
+    password: str
+
+
+class WalletUnlockResponse(BaseModel):
+    wallet_id: str
+    unlocked: bool
+
+
+class WalletSignRequest(BaseModel):
+    password: str
+    message_base64: str
+
+
+class WalletSignResponse(BaseModel):
+    wallet_id: str
+    signature_base64: str
apps/wallet-daemon/src/app/security.py (new file, 43 lines)
@@ -0,0 +1,43 @@
from __future__ import annotations

import re
import threading
import time
from collections import defaultdict, deque


class RateLimiter:
    def __init__(self, max_requests: int = 30, window_seconds: int = 60) -> None:
        self._max_requests = max_requests
        self._window_seconds = window_seconds
        self._lock = threading.Lock()
        self._records: dict[str, deque[float]] = defaultdict(deque)

    def allow(self, key: str) -> bool:
        now = time.monotonic()
        with self._lock:
            entries = self._records[key]
            while entries and now - entries[0] > self._window_seconds:
                entries.popleft()
            if len(entries) >= self._max_requests:
                return False
            entries.append(now)
            return True


def validate_password_rules(password: str) -> None:
    if len(password) < 12:
        raise ValueError("password must be at least 12 characters long")
    if not re.search(r"[A-Z]", password):
        raise ValueError("password must include at least one uppercase letter")
    if not re.search(r"[a-z]", password):
        raise ValueError("password must include at least one lowercase letter")
    if not re.search(r"\d", password):
        raise ValueError("password must include at least one digit")
    if not re.search(r"[^A-Za-z0-9]", password):
        raise ValueError("password must include at least one symbol")


def wipe_buffer(buffer: bytearray) -> None:
    for index in range(len(buffer)):
        buffer[index] = 0
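How these helpers compose in a request path; a sketch only, with an illustrative limiter key and error type:

```python
limiter = RateLimiter(max_requests=5, window_seconds=60)


def handle_unlock(client_ip: str, password: str) -> None:
    # Sliding-window check: at most 5 calls per IP per 60 s.
    if not limiter.allow(client_ip):
        raise RuntimeError("rate limit exceeded")
    # Raises ValueError naming the first rule the password violates.
    validate_password_rules(password)
```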
@@ -1,5 +1,7 @@
 from __future__ import annotations
+
+from pathlib import Path

 from pydantic import Field
 from pydantic_settings import BaseSettings

@@ -14,6 +16,7 @@ class Settings(BaseSettings):
     coordinator_api_key: str = Field(default="client_dev_key_1", alias="COORDINATOR_API_KEY")
     rest_prefix: str = Field(default="/v1", alias="REST_PREFIX")
+    ledger_db_path: Path = Field(default=Path("./data/wallet_ledger.db"), alias="LEDGER_DB_PATH")

     class Config:
         env_file = ".env"
apps/wallet-daemon/tests/test_ledger.py (new file, 38 lines)
@@ -0,0 +1,38 @@
from __future__ import annotations

from pathlib import Path

from app.ledger_mock import SQLiteLedgerAdapter


def test_upsert_and_get_wallet(tmp_path: Path) -> None:
    db_path = tmp_path / "ledger.db"
    adapter = SQLiteLedgerAdapter(db_path)

    adapter.upsert_wallet("wallet-1", "pubkey", {"label": "primary"})

    record = adapter.get_wallet("wallet-1")
    assert record is not None
    assert record.wallet_id == "wallet-1"
    assert record.public_key == "pubkey"
    assert record.metadata["label"] == "primary"

    # Update metadata and ensure persistence
    adapter.upsert_wallet("wallet-1", "pubkey", {"label": "updated"})
    updated = adapter.get_wallet("wallet-1")
    assert updated is not None
    assert updated.metadata["label"] == "updated"


def test_event_ordering(tmp_path: Path) -> None:
    db_path = tmp_path / "ledger.db"
    adapter = SQLiteLedgerAdapter(db_path)

    adapter.upsert_wallet("wallet-1", "pubkey", {})
    adapter.record_event("wallet-1", "created", {"step": 1})
    adapter.record_event("wallet-1", "unlock", {"step": 2})
    adapter.record_event("wallet-1", "sign", {"step": 3})

    events = list(adapter.list_events("wallet-1"))
    assert [event.event_type for event in events] == ["created", "unlock", "sign"]
    assert [event.payload["step"] for event in events] == [1, 2, 3]
apps/wallet-daemon/tests/test_wallet_api.py (new file, 82 lines)
@@ -0,0 +1,82 @@
from __future__ import annotations

import base64

import pytest
from fastapi.testclient import TestClient

from aitbc_chain.app import create_app  # noqa: I100

from app.deps import get_keystore, get_ledger


@pytest.fixture(name="client")
def client_fixture(tmp_path, monkeypatch):
    # Override ledger path to temporary directory
    from app.settings import Settings

    class TestSettings(Settings):
        ledger_db_path = tmp_path / "ledger.db"

    monkeypatch.setattr("app.deps.get_settings", lambda: TestSettings())

    app = create_app()
    return TestClient(app)


def _create_wallet(client: TestClient, wallet_id: str, password: str = "Password!234") -> None:
    payload = {
        "wallet_id": wallet_id,
        "password": password,
    }
    response = client.post("/v1/wallets", json=payload)
    assert response.status_code == 201, response.text


def test_wallet_workflow(client: TestClient):
    wallet_id = "wallet-1"
    password = "StrongPass!234"

    # Create wallet
    response = client.post(
        "/v1/wallets",
        json={
            "wallet_id": wallet_id,
            "password": password,
            "metadata": {"label": "test"},
        },
    )
    assert response.status_code == 201, response.text
    data = response.json()["wallet"]
    assert data["wallet_id"] == wallet_id
    assert "public_key" in data

    # List wallets
    response = client.get("/v1/wallets")
    assert response.status_code == 200
    items = response.json()["items"]
    assert any(item["wallet_id"] == wallet_id for item in items)

    # Unlock wallet
    response = client.post(f"/v1/wallets/{wallet_id}/unlock", json={"password": password})
    assert response.status_code == 200
    assert response.json()["unlocked"] is True

    # Sign payload
    message = base64.b64encode(b"hello").decode()
    response = client.post(
        f"/v1/wallets/{wallet_id}/sign",
        json={"password": password, "message_base64": message},
    )
    assert response.status_code == 200, response.text
    signature = response.json()["signature_base64"]
    assert isinstance(signature, str) and len(signature) > 0


def test_wallet_password_rules(client: TestClient):
    response = client.post(
        "/v1/wallets",
        json={"wallet_id": "weak", "password": "short"},
    )
    assert response.status_code == 400
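The same flow the test exercises can be driven against a running daemon over HTTP; a hedged sketch with `requests` (the base URL and port are assumptions about a local deployment):

```python
import base64

import requests

BASE = "http://127.0.0.1:8000/v1"  # assumed local dev address

requests.post(f"{BASE}/wallets", json={
    "wallet_id": "wallet-1",
    "password": "StrongPass!234",
    "metadata": {"label": "demo"},
}).raise_for_status()

requests.post(
    f"{BASE}/wallets/wallet-1/unlock", json={"password": "StrongPass!234"}
).raise_for_status()

message = base64.b64encode(b"hello").decode()
resp = requests.post(f"{BASE}/wallets/wallet-1/sign", json={
    "password": "StrongPass!234",
    "message_base64": message,
})
resp.raise_for_status()
print(resp.json()["signature_base64"])
```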
docs/done.md (17 changed lines)
@@ -41,8 +41,25 @@

 - Added `apps/wallet-daemon/src/app/receipts/service.py` providing `ReceiptVerifierService` that fetches and validates receipts via `aitbc_sdk`.
 - Created unit tests under `apps/wallet-daemon/tests/test_receipts.py` verifying service behavior.
+- Implemented wallet SDK receipt ingestion + attestation surfacing in `packages/py/aitbc-sdk/src/receipts.py`, including pagination client, signature verification, and failure diagnostics with full pytest coverage.

 ## Explorer Web

 - Initialized a Vite + TypeScript scaffold in `apps/explorer-web/` with `vite.config.ts`, `tsconfig.json`, and placeholder `src/main.ts` content.
 - Installed frontend dependencies locally to unblock editor tooling and TypeScript type resolution.
+- Implemented `overview` page stats rendering backed by mock block/transaction/receipt fetchers, including robust empty-state handling and TypeScript type fixes.
+
+## Pool Hub
+
+- Implemented FastAPI service scaffolding with Redis/PostgreSQL-backed repositories, match/health/metrics endpoints, and Prometheus instrumentation (`apps/pool-hub/src/poolhub/`).
+- Added Alembic migrations (`apps/pool-hub/migrations/`) and async integration tests covering repositories and endpoints (`apps/pool-hub/tests/`).
+
+## Solidity Token
+
+- Implemented attested minting logic in `packages/solidity/aitbc-token/contracts/AIToken.sol` using `AccessControl` role gates and ECDSA signature recovery.
+- Added Hardhat unit tests in `packages/solidity/aitbc-token/test/aitoken.test.ts` covering successful minting, replay prevention, and invalid attestor signatures.
+- Configured project TypeScript settings via `packages/solidity/aitbc-token/tsconfig.json` to align Hardhat, Node, and Mocha typings for the contract test suite.
+
+## JavaScript SDK
+
+- Delivered fetch-based client wrapper with TypeScript definitions and Vitest coverage under `packages/js/aitbc-sdk/`.
@@ -1,33 +1,33 @@
 # Explorer Web – Task Breakdown

-## Status (2025-09-27)
+## Status (2025-09-28)

-- **Stage 1**: UI scaffolding and mock data remain TODO; no implementation merged yet. Pending work should align with coordinator receipt history once backend endpoints stabilize.
+- **Stage 1**: Overview page renders block/transaction/receipt summaries from mock data with empty-state fallbacks. Remaining work focuses on blocks/transactions detail UIs, responsive polish, and live data toggle validation.

 ## Stage 1 (MVP)

 - **Structure & Assets**
-  - Populate `apps/explorer-web/public/` with `index.html`, `block.html`, `tx.html`, `address.html`, `receipts.html`, `404.html` scaffolds.
+  - ⏳ Populate `apps/explorer-web/public/` with `index.html`, `block.html`, `tx.html`, `address.html`, `receipts.html`, `404.html` scaffolds.
-  - Add base stylesheets (`css/base.css`, `css/layout.css`, `css/theme-dark.css`).
+  - ✅ Add base stylesheets (`public/css/base.css`, `public/css/layout.css`, `public/css/theme.css`).
-  - Include logo and icon assets under `public/assets/`.
+  - ⏳ Include logo and icon assets under `public/assets/`.

-- **JavaScript Modules**
+- **TypeScript Modules**
-  - Implement `js/config.js`, `js/api.js`, `js/store.js`, and `js/utils.js` helpers.
+  - ✅ Provide configuration and data helpers (`src/config.ts`, `src/lib/mockData.ts`, `src/lib/models.ts`).
-  - Create component modules under `js/components/` (header, footer, searchbox, block-table, tx-table, pager, keyvalue).
+  - ⏳ Add shared store/utilities module for cross-page state.
-  - Implement page controllers under `js/pages/` for home, block detail, tx detail, address view, receipts view.
+  - ✅ Implement core page controllers and components under `src/pages/` and `src/components/` (overview, blocks, transactions, addresses, receipts, header/footer, data mode toggle).

 - **Mock Data**
-  - Provide optional mock JSON fixtures under `public/js/vendors/` or `public/mock/`.
+  - ✅ Provide mock JSON fixtures under `public/mock/`.
-  - Enable mock mode toggle via `CONFIG.USE_MOCK`.
+  - ✅ Enable mock/live mode toggle via `getDataMode()` and `<data-mode-toggle>` components.

 - **Interaction & UX**
-  - Implement search box detection for block numbers, hashes, and addresses.
+  - ⏳ Implement search box detection for block numbers, hashes, and addresses.
-  - Add pagination/infinite scroll for block and transaction tables.
+  - ⏳ Add pagination or infinite scroll for block and transaction tables.
-  - Ensure dark theme styling with readable typography and table hover states.
+  - ⏳ Expand responsive polish beyond overview cards (tablet/mobile grid, table hover states).

 - **Documentation**
-  - Update `apps/explorer-web/README.md` with build/run instructions and API assumptions.
+  - ✅ Update `apps/explorer-web/README.md` with build/run instructions and API assumptions.
-  - Document required CORS configuration for blockchain node.
+  - ✅ Capture coordinator API + CORS considerations in README deployment notes.

 ## Stage 2+
@@ -17,10 +17,10 @@ This roadmap aggregates high-priority tasks derived from the bootstrap specifica

 - **Blockchain Node**
   - ✅ Define SQLModel schema for blocks, transactions, accounts, receipts (`apps/blockchain-node/src/aitbc_chain/models.py`).
-  - ⏳ Harden schema parity across runtime + storage:
+  - ✅ Harden schema parity across runtime + storage:
-    - Generate Alembic baseline and incremental migrations in `apps/blockchain-node/migrations/` mirroring the current SQLModel definitions.
+    - Alembic baseline + follow-on migrations in `apps/blockchain-node/migrations/` now track the SQLModel schema (blocks, transactions, receipts, accounts).
-    - Wire explicit SQLModel relationships (`Relationship`, `ForeignKey`) for block ↔ transaction ↔ receipt joins per bootstrap spec.
+    - Added `Relationship` + `ForeignKey` wiring in `apps/blockchain-node/src/aitbc_chain/models.py` for block ↔ transaction ↔ receipt joins.
-    - Add validation hooks (SQLModel validators / custom service checks) to enforce hash formatting, foreign key integrity, and enum bounds before persistence.
+    - Introduced hex/enum validation hooks via Pydantic validators to ensure hash integrity and safe persistence (see the sketch below).
   - ✅ Implement PoA proposer loop with block assembly (`apps/blockchain-node/src/aitbc_chain/consensus/poa.py`).
   - ✅ Expose REST RPC endpoints for tx submission, balances, receipts (`apps/blockchain-node/src/aitbc_chain/rpc/router.py`).
   - ⏳ Deliver WebSocket RPC + P2P gossip layer:
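The validation-hooks bullet refers to checks of roughly this shape; the field name and the 0x-prefixed 64-hex rule are illustrative assumptions, not the node's actual schema:

```python
import re

from pydantic import field_validator
from sqlmodel import SQLModel

HEX_HASH = re.compile(r"^0x[0-9a-f]{64}$")  # assumed canonical hash shape


class TransactionBase(SQLModel):
    tx_hash: str

    @field_validator("tx_hash")
    @classmethod
    def _check_tx_hash(cls, value: str) -> str:
        if not HEX_HASH.match(value):
            raise ValueError("tx_hash must be a 0x-prefixed 64-char lowercase hex string")
        return value
```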
@@ -38,25 +38,24 @@ This roadmap aggregates high-priority tasks derived from the bootstrap specifica
 - **Receipt Schema**
   - ✅ Finalize canonical JSON receipt format under `protocols/receipts/` (includes sample signed receipts).
   - ✅ Implement signing/verification helpers in `packages/py/aitbc-crypto` (JS SDK pending).
-  - Translate `docs/bootstrap/aitbc_tech_plan.md` contract skeleton into Solidity project.
+  - ✅ Translate `docs/bootstrap/aitbc_tech_plan.md` contract skeleton into Solidity project (`packages/solidity/aitbc-token/`).
-  - Add deployment/test scripts and document minting flow.
+  - ✅ Add deployment/test scripts and document minting flow (`packages/solidity/aitbc-token/scripts/` and `docs/run.md`).

 - **Wallet Daemon**
-  - Implement encrypted keystore (Argon2id + XChaCha20-Poly1305).
+  - ✅ Implement encrypted keystore (Argon2id + XChaCha20-Poly1305) via `KeystoreService`.
-  - Provide REST and JSON-RPC endpoints for wallet management and signing.
+  - ✅ Provide REST and JSON-RPC endpoints for wallet management and signing (`api_rest.py`, `api_jsonrpc.py`).
-  - Add mock ledger adapter with SQLite backend.
+  - ✅ Add mock ledger adapter with SQLite backend powering event history (`ledger_mock/`).
   - ✅ Integrate Python receipt verification helpers (`aitbc_sdk`) and expose API/service utilities validating miner + coordinator signatures.
-  - ⏳ Implement Wallet SDK receipt ingestion + attestation surfacing:
+  - ✅ Implement Wallet SDK receipt ingestion + attestation surfacing:
-    - Add client for `/v1/jobs/{job_id}/receipts` with pagination + retry support (`packages/py/aitbc-sdk/src/receipts.py`).
+    - Added `/v1/jobs/{job_id}/receipts` client helpers with cursor pagination, retry/backoff, and summary reporting (`packages/py/aitbc-sdk/src/receipts.py`).
-    - Reuse signature verification helpers to validate miner + coordinator attestations before exposing results.
+    - Reused crypto helpers to validate miner and coordinator signatures, capturing per-key failure reasons for downstream UX.
-    - Surface aggregated attestation status + failure reasons via SDK API for UI consumption; mirror logic in pending JS helper.
+    - Surfaced aggregated attestation status (`ReceiptStatus`) and failure diagnostics for SDK + UI consumers; JS helper parity still planned.

-## Stage 3 — Pool Hub & Marketplace
+## Stage 2 — Pool Hub & Marketplace

 - **Pool Hub**
-  - Implement miner registry, scoring engine, and `/v1/match` API.
-  - Integrate Redis/PostgreSQL backing stores per bootstrap spec.
-  - Add observability endpoints (`/v1/health`, `/v1/metrics`).
+  - ✅ Implement miner registry, scoring engine, and `/v1/match` API with Redis/PostgreSQL backing stores.
+  - ✅ Add observability endpoints (`/v1/health`, `/v1/metrics`) plus Prometheus instrumentation and integration tests.

 - **Marketplace Web**
   - Initialize Vite project with vanilla TypeScript.

@@ -69,6 +68,7 @@ This roadmap aggregates high-priority tasks derived from the bootstrap specifica
   - ✅ Seed mock datasets (`public/mock/`) and fetch helpers powering overview + blocks tables.
   - ✅ Extend mock integrations to transactions, addresses, and receipts pages.
   - ✅ Implement styling system, mock/live data toggle, and coordinator API wiring scaffold.
+  - ✅ Render overview stats from mock block/transaction/receipt summaries with graceful empty-state fallbacks.
   - ⏳ Validate live mode + responsive polish:
     - Hit live coordinator endpoints (`/v1/blocks`, `/v1/transactions`, `/v1/addresses`, `/v1/receipts`) via `getDataMode() === "live"` and reconcile payloads with UI models.
     - Add fallbacks + error surfacing for partial/failed live responses (toast + console diagnostics).

@@ -84,8 +84,7 @@ This roadmap aggregates high-priority tasks derived from the bootstrap specifica
   - Provide end-to-end tests + examples validating Pool Hub + wallet flows leveraging the coordinator receipt verification primitives.

 - **JavaScript SDK (`packages/js/aitbc-sdk`)**
-  - Provide fetch-based wrapper for web clients.
-  - Offer TypeScript definitions and basic auth handling.
+  - ✅ Provide fetch-based wrapper for web clients with TypeScript definitions and basic auth helpers.

 - **Examples**
   - Populate quickstart clients (Python/JS) with working code.

@@ -109,6 +108,7 @@ Use this roadmap as the canonical checklist during implementation. Mark complete

 - **Blockchain Node**: bootstrap module layout (`apps/blockchain-node/src/`), implement SQLModel schemas and RPC stubs aligned with historical/attested receipts.
 - **Explorer Web**: finish mock integration across all pages, add styling + mock/live toggle, and begin wiring coordinator endpoints (e.g., `/v1/jobs/{job_id}/receipts`).
+  - Current focus: reuse new overview metrics scaffolding for blocks/transactions detail views and expand coverage to live data mode.
 - **Marketplace Web**: scaffold Vite/vanilla frontends with mock integrations consuming the coordinator receipt history endpoints and SDK examples.
 - **Pool Hub**: initialize FastAPI project, scoring registry, and telemetry ingestion hooks leveraging coordinator/miner metrics.
 - **CI Enhancements**: add blockchain-node tests once available and frontend build/lint checks to `.github/workflows/python-tests.yml` or follow-on workflows.
docs/run.md (54 changed lines)
@@ -75,7 +75,6 @@ These instructions cover the newly scaffolded services. Install dependencies usi
 print("miner signature valid:", verification.miner_signature.valid)
 print("coordinator attestations:", [att.valid for att in verification.coordinator_attestations])
 PY
-```
 For receipts containing `attestations`, iterate the list and verify each entry with the corresponding public key.
 A JavaScript helper will ship with the Stage 2 SDK under `packages/js/`; until then, receipts can be verified with Node.js by loading the canonical JSON and invoking an Ed25519 verify function from `tweetnacl` (the payload is `canonical_json(receipt)` and the public key is `receipt.signature.public_key`).
 Example Node.js snippet:

@@ -98,11 +97,62 @@ These instructions cover the newly scaffolded services. Install dependencies usi
 JS
 ```
+
+## Solidity Token (`packages/solidity/aitbc-token/`)
+
+1. Navigate to the token project:
+   ```bash
+   cd packages/solidity/aitbc-token
+   npm install
+   ```
+2. Run the contract unit tests:
+   ```bash
+   npx hardhat test
+   ```
+3. Deploy `AIToken` to the configured Hardhat network. Provide the coordinator (required) and attestor (optional) role recipients via environment variables:
+   ```bash
+   COORDINATOR_ADDRESS=0xCoordinator \
+   ATTESTOR_ADDRESS=0xAttestor \
+   npx hardhat run scripts/deploy.ts --network localhost
+   ```
+   The script prints the deployed address and automatically grants the coordinator and attestor roles if they are not already assigned. Export the printed address for follow-on steps:
+   ```bash
+   export AITOKEN_ADDRESS=0xDeployedAddress
+   ```
+4. Mint tokens against an attested receipt by calling the contract from Hardhat’s console or a script. The helper below loads the deployed contract and invokes `mintWithReceipt` with an attestor signature:
+   ```ts
+   // scripts/mintWithReceipt.ts
+   import { ethers } from "hardhat";
+   import { AIToken__factory } from "../typechain-types";
+
+   async function main() {
+     const [coordinator] = await ethers.getSigners();
+     const token = AIToken__factory.connect(process.env.AITOKEN_ADDRESS!, coordinator);
+
+     const provider = "0xProvider";
+     const units = 100n;
+     const receiptHash = "0x...";
+     const signature = "0xSignedStructHash";
+
+     const tx = await token.mintWithReceipt(provider, units, receiptHash, signature);
+     await tx.wait();
+     console.log("Mint complete", await token.balanceOf(provider));
+   }
+
+   main().catch((err) => {
+     console.error(err);
+     process.exitCode = 1;
+   });
+   ```
+   Execute the helper with `AITOKEN_ADDRESS` exported and the signature produced by the attestor key used in your tests or integration flow:
+   ```bash
+   AITOKEN_ADDRESS=$AITOKEN_ADDRESS npx ts-node scripts/mintWithReceipt.ts
+   ```
+5. To derive the signature payload, reuse the `buildSignature` helper from `test/aitoken.test.ts` or recreate it in a script. The struct hash encodes `(chainId, contractAddress, provider, units, receiptHash)` and must be signed by an authorized attestor account.
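The struct hash in step 5 can also be rebuilt off-chain in Python. A hedged sketch with `web3.py` and `eth-account`, assuming the contract packs `(chainId, contractAddress, provider, units, receiptHash)` in that order and verifies an EIP-191 personal-sign over the hash; `buildSignature` in `test/aitoken.test.ts` remains the authoritative encoding:

```python
from eth_account import Account
from eth_account.messages import encode_defunct
from web3 import Web3


def build_attestor_signature(chain_id: int, token: str, provider: str,
                             units: int, receipt_hash: bytes, attestor_key: str) -> str:
    # Field order and ABI types are assumptions; mirror the Hardhat test helper.
    struct_hash = Web3.solidity_keccak(
        ["uint256", "address", "address", "uint256", "bytes32"],
        [chain_id, token, provider, units, receipt_hash],
    )
    signed = Account.sign_message(encode_defunct(struct_hash), private_key=attestor_key)
    return signed.signature.hex()
```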
 ## Wallet Daemon (`apps/wallet-daemon/`)

 1. Navigate to the service directory:
    ```bash
-   cd apps/wallet-daemon
    ```
 2. Install dependencies:
    ```bash
packages/js/aitbc-sdk/package.json (new file, 25 lines)
@@ -0,0 +1,25 @@
{
  "name": "@aitbc/aitbc-sdk",
  "version": "0.1.0",
  "description": "AITBC JavaScript SDK for coordinator receipts",
  "type": "module",
  "main": "dist/index.js",
  "module": "dist/index.js",
  "types": "dist/index.d.ts",
  "scripts": {
    "build": "tsc -p tsconfig.json",
    "test": "vitest run",
    "test:watch": "vitest"
  },
  "dependencies": {
    "cross-fetch": "^4.0.0"
  },
  "devDependencies": {
    "@types/node": "^20.11.30",
    "typescript": "^5.4.5",
    "vitest": "^1.6.0"
  },
  "keywords": ["aitbc", "sdk", "receipts"],
  "author": "AITBC Team",
  "license": "MIT"
}
packages/js/aitbc-sdk/src/client.test.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
import { describe, expect, it } from "vitest";

import { AitbcClient } from "./client";

const createClient = () =>
  new AitbcClient({
    baseUrl: "https://api.example.com",
    apiKey: "test-key",
    fetchImpl: async (input: RequestInfo | URL, init?: RequestInit) =>
      new Response(JSON.stringify({ job_id: "job", candidates: [] }), {
        status: 200,
        headers: { "Content-Type": "application/json" },
      }),
  });

describe("AitbcClient", () => {
  it("sends match requests", async () => {
    const client = createClient();
    const response = await client.match({ jobId: "job" });
    expect(response.jobId).toBe("job");
    expect(response.candidates).toEqual([]);
  });
});
packages/js/aitbc-sdk/src/client.ts (new file, 138 lines)
@@ -0,0 +1,138 @@
import type {
  ClientOptions,
  MatchRequest,
  MatchResponse,
  HealthResponse,
  MetricsResponse,
  WalletSignRequest,
  WalletSignResponse,
  RequestOptions,
} from "./types";

const DEFAULT_HEADERS = {
  "Content-Type": "application/json",
  Accept: "application/json",
};

export class AitbcClient {
  private readonly baseUrl: string;
  private readonly apiKey?: string;
  private readonly basicAuth?: ClientOptions["basicAuth"];
  private readonly fetchImpl: typeof fetch;

  constructor(options: ClientOptions) {
    this.baseUrl = options.baseUrl.replace(/\/$/, "");
    this.apiKey = options.apiKey;
    this.basicAuth = options.basicAuth;
    this.fetchImpl = options.fetchImpl ?? fetch;
  }

  async match(payload: MatchRequest, options?: RequestOptions): Promise<MatchResponse> {
    const raw = await this.request<any>("POST", "/v1/match", {
      ...options,
      body: JSON.stringify({
        job_id: payload.jobId,
        requirements: payload.requirements ?? {},
        hints: payload.hints ?? {},
        top_k: payload.topK ?? 1,
      }),
    });
    return {
      jobId: raw.job_id,
      candidates: (raw.candidates ?? []).map((candidate: any) => ({
        minerId: candidate.miner_id,
        addr: candidate.addr,
        proto: candidate.proto,
        score: candidate.score,
        explain: candidate.explain,
        etaMs: candidate.eta_ms,
        price: candidate.price,
      })),
    };
  }

  async health(options?: RequestOptions): Promise<HealthResponse> {
    const raw = await this.request<any>("GET", "/v1/health", options);
    return {
      status: raw.status,
      db: raw.db,
      redis: raw.redis,
      minersOnline: raw.miners_online,
      dbError: raw.db_error ?? null,
      redisError: raw.redis_error ?? null,
    };
  }

  async metrics(options?: RequestOptions): Promise<MetricsResponse> {
    const response = await this.rawRequest("GET", "/metrics", options);
    const raw = await response.text();
    return { raw };
  }

  async sign(request: WalletSignRequest, options?: RequestOptions): Promise<WalletSignResponse> {
    return this.request<WalletSignResponse>("POST", `/v1/wallets/${encodeURIComponent(request.walletId)}/sign`, {
      ...options,
      body: JSON.stringify({
        password: request.password,
        message_base64: request.messageBase64,
      }),
    });
  }

  private async request<T>(method: string, path: string, options: RequestOptions = {}): Promise<T> {
    const response = await this.rawRequest(method, path, options);
    const text = await response.text();
    if (!response.ok) {
      throw new Error(`AITBC request failed (${response.status}): ${text || response.statusText}`);
    }
    return text ? (JSON.parse(text) as T) : ({} as T);
  }

  private async rawRequest(method: string, path: string, options: RequestOptions = {}): Promise<Response> {
    const url = this.buildUrl(path, options.query);
    const headers = this.buildHeaders(options.headers);

    return this.fetchImpl(url, {
      method,
      ...options,
      headers,
    });
  }

  private buildUrl(path: string, query?: RequestOptions["query"]): string {
    const url = new URL(`${this.baseUrl}${path}`);
    if (query) {
      for (const [key, value] of Object.entries(query)) {
        if (value !== undefined) {
          url.searchParams.set(key, String(value));
        }
      }
    }
    return url.toString();
  }

  private buildHeaders(extra?: HeadersInit): HeadersInit {
    const headers: Record<string, string> = { ...DEFAULT_HEADERS };
    if (this.apiKey) {
      headers["X-Api-Key"] = this.apiKey;
    }
    if (this.basicAuth) {
      const token = btoa(`${this.basicAuth.username}:${this.basicAuth.password}`);
      headers["Authorization"] = `Basic ${token}`;
    }
    if (extra) {
      if (extra instanceof Headers) {
        extra.forEach((value, key) => {
          headers[key] = value;
        });
      } else if (Array.isArray(extra)) {
        for (const [key, value] of extra) {
          headers[key] = value;
        }
      } else {
        Object.assign(headers, extra as Record<string, string>);
      }
    }
    return headers;
  }
}
packages/js/aitbc-sdk/src/types.ts (new file, 59 lines)
@@ -0,0 +1,59 @@
export interface MatchRequest {
  jobId: string;
  requirements?: Record<string, unknown>;
  hints?: Record<string, unknown>;
  topK?: number;
}

export interface MatchCandidate {
  minerId: string;
  addr: string;
  proto: string;
  score: number;
  explain?: string;
  etaMs?: number;
  price?: number;
}

export interface MatchResponse {
  jobId: string;
  candidates: MatchCandidate[];
}

export interface HealthResponse {
  status: "ok" | "degraded";
  db: boolean;
  redis: boolean;
  minersOnline: number;
  dbError?: string | null;
  redisError?: string | null;
}

export interface MetricsResponse {
  raw: string;
}

export interface WalletSignRequest {
  walletId: string;
  password: string;
  messageBase64: string;
}

export interface WalletSignResponse {
  walletId: string;
  signatureBase64: string;
}

export interface ClientOptions {
  baseUrl: string;
  apiKey?: string;
  basicAuth?: {
    username: string;
    password: string;
  };
  fetchImpl?: typeof fetch;
}

export interface RequestOptions extends RequestInit {
  query?: Record<string, string | number | boolean | undefined>;
}
packages/js/aitbc-sdk/tsconfig.json (new file, 16 lines)
@@ -0,0 +1,16 @@
{
  "compilerOptions": {
    "target": "ES2020",
    "module": "ESNext",
    "moduleResolution": "Node",
    "declaration": true,
    "declarationDir": "dist",
    "outDir": "dist",
    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true
  },
  "include": ["src/**/*.ts"],
  "exclude": ["dist", "node_modules"]
}
@@ -2,6 +2,7 @@

 from .receipts import (
     CoordinatorReceiptClient,
+    ReceiptPage,
     ReceiptVerification,
     SignatureValidation,
     verify_receipt,

@@ -10,6 +11,7 @@ from .receipts import (

 __all__ = [
     "CoordinatorReceiptClient",
+    "ReceiptPage",
     "ReceiptVerification",
     "SignatureValidation",
     "verify_receipt",
@@ -1,7 +1,8 @@
 from __future__ import annotations

-from dataclasses import dataclass
-from typing import Any, Dict, Iterable, List, Optional
+import time
+from dataclasses import dataclass, field
+from typing import Any, Dict, Iterable, Iterator, List, Optional

 import httpx
 import base64

@@ -14,6 +15,7 @@ class SignatureValidation:
     key_id: str
     valid: bool
     algorithm: str = "Ed25519"
+    reason: Optional[str] = None


 @dataclass

@@ -28,12 +30,58 @@ class ReceiptVerification:
             return False
         return all(att.valid for att in self.coordinator_attestations)

+    def failure_reasons(self) -> List[str]:
+        reasons: List[str] = []
+        if not self.miner_signature.valid:
+            key_part = self.miner_signature.key_id or "unknown"
+            reasons.append(f"miner_signature_invalid:{key_part}")
+        for att in self.coordinator_attestations:
+            if not att.valid:
+                key_part = att.key_id or "unknown"
+                reasons.append(f"coordinator_attestation_invalid:{key_part}")
+        return reasons
+
+
+@dataclass
+class ReceiptFailure:
+    receipt_id: str
+    reasons: List[str]
+    verification: ReceiptVerification
+
+
+@dataclass
+class ReceiptStatus:
+    job_id: str
+    total: int
+    verified_count: int
+    failed: List[ReceiptVerification] = field(default_factory=list)
+    latest_verified: Optional[ReceiptVerification] = None
+    failure_reasons: Dict[str, int] = field(default_factory=dict)
+    failures: List[ReceiptFailure] = field(default_factory=list)
+
+    @property
+    def all_verified(self) -> bool:
+        return self.total > 0 and self.verified_count == self.total
+
+    @property
+    def has_failures(self) -> bool:
+        return bool(self.failures)
+
+
 class CoordinatorReceiptClient:
-    def __init__(self, base_url: str, api_key: str, timeout: float = 10.0) -> None:
+    def __init__(
+        self,
+        base_url: str,
+        api_key: str,
+        timeout: float = 10.0,
+        max_retries: int = 3,
+        backoff_seconds: float = 0.5,
+    ) -> None:
         self.base_url = base_url.rstrip("/")
         self.api_key = api_key
         self.timeout = timeout
+        self.max_retries = max_retries
+        self.backoff_seconds = backoff_seconds

     def _client(self) -> httpx.Client:
         return httpx.Client(
@@ -43,28 +91,139 @@ class CoordinatorReceiptClient:
         )

     def fetch_latest(self, job_id: str) -> Optional[Dict[str, Any]]:
-        with self._client() as client:
-            resp = client.get(f"/v1/jobs/{job_id}/receipt")
-            if resp.status_code == 404:
-                return None
-            resp.raise_for_status()
-            return resp.json()
+        resp = self._request("GET", f"/v1/jobs/{job_id}/receipt", allow_404=True)
+        if resp is None:
+            return None
+        return resp.json()

     def fetch_history(self, job_id: str) -> List[Dict[str, Any]]:
-        with self._client() as client:
-            resp = client.get(f"/v1/jobs/{job_id}/receipts")
-            resp.raise_for_status()
-            data = resp.json()
-            if isinstance(data, dict) and isinstance(data.get("items"), list):
-                return data["items"]
-            raise ValueError("unexpected receipt history response shape")
+        return list(self.iter_receipts(job_id=job_id))
+
+    def iter_receipts(self, job_id: str, page_size: int = 100) -> Iterator[Dict[str, Any]]:
+        cursor: Optional[str] = None
+        while True:
+            page = self.fetch_receipts_page(job_id=job_id, cursor=cursor, limit=page_size)
+            for item in page.items:
+                yield item
+
+            if not page.next_cursor:
+                break
+            cursor = page.next_cursor
+
+    def fetch_receipts_page(
+        self,
+        *,
+        job_id: str,
+        cursor: Optional[str] = None,
+        limit: Optional[int] = 100,
+    ) -> "ReceiptPage":
+        params: Dict[str, Any] = {}
+        if cursor:
+            params["cursor"] = cursor
+        if limit is not None:
+            params["limit"] = limit
+
+        response = self._request("GET", f"/v1/jobs/{job_id}/receipts", params=params)
+        payload = response.json()
+
+        if isinstance(payload, list):
+            items = payload
+            next_cursor: Optional[str] = None
+            raw: Dict[str, Any] = {"items": items}
+        else:
+            items = list(payload.get("items") or [])
+            next_cursor = payload.get("next_cursor") or payload.get("next") or payload.get("cursor")
+            raw = payload
+
+        return ReceiptPage(items=items, next_cursor=next_cursor, raw=raw)
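For reference, the two response shapes `fetch_receipts_page` normalizes (values illustrative):

```python
# Bare list: treated as a single page, so next_cursor stays None.
bare_list = [{"receipt_id": "r1"}]

# Dict envelope: the first of next_cursor / next / cursor found continues pagination.
envelope = {"items": [{"receipt_id": "r2"}], "next_cursor": "c1"}
```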
+
+    def summarize_receipts(self, job_id: str, page_size: int = 100) -> "ReceiptStatus":
+        receipts = list(self.iter_receipts(job_id=job_id, page_size=page_size))
+        if not receipts:
+            return ReceiptStatus(job_id=job_id, total=0, verified_count=0, failed=[], latest_verified=None)
+
+        verifications = verify_receipts(receipts)
+        verified = [v for v in verifications if v.verified]
+        failed = [v for v in verifications if not v.verified]
+        failures: List[ReceiptFailure] = []
+        reason_counts: Dict[str, int] = {}
+
+        for verification in failed:
+            reasons = verification.failure_reasons()
+            receipt_id = str(
+                verification.receipt.get("receipt_id")
+                or verification.receipt.get("id")
+                or verification.receipt.get("uuid")
+                or ""
+            )
+            for reason in reasons:
+                reason_counts[reason] = reason_counts.get(reason, 0) + 1
+            failures.append(ReceiptFailure(receipt_id=receipt_id, reasons=reasons, verification=verification))
+
+        latest_verified = verified[-1] if verified else None
+        return ReceiptStatus(
+            job_id=job_id,
+            total=len(verifications),
+            verified_count=len(verified),
+            failed=failed,
+            latest_verified=latest_verified,
+            failure_reasons=reason_counts,
+            failures=failures,
+        )
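A consumer-side sketch of the summary API (the URL and key are placeholders):

```python
client = CoordinatorReceiptClient("https://coordinator.example", "api-key")
status = client.summarize_receipts("job-123")

if status.all_verified:
    print(f"all {status.total} receipts verified")
else:
    # failure_reasons maps reason strings such as
    # "miner_signature_invalid:<key_id>" to occurrence counts.
    for reason, count in status.failure_reasons.items():
        print(f"{count}x {reason}")
```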
+
+    def _request(
+        self,
+        method: str,
+        url: str,
+        *,
+        params: Optional[Dict[str, Any]] = None,
+        allow_404: bool = False,
+    ) -> Optional[httpx.Response]:
+        attempt = 0
+        while True:
+            try:
+                with self._client() as client:
+                    response = client.request(method=method, url=url, params=params)
+            except httpx.HTTPError:
+                if attempt >= self.max_retries:
+                    raise
+                attempt += 1
+                time.sleep(self.backoff_seconds * (2 ** (attempt - 1)))
+                continue
+
+            if response.status_code == 404 and allow_404:
+                return None
+
+            if response.status_code in {429} or response.status_code >= 500:
+                if attempt >= self.max_retries:
+                    response.raise_for_status()
+                else:
+                    attempt += 1
+                    time.sleep(self.backoff_seconds * (2 ** (attempt - 1)))
+                    continue
+
+            response.raise_for_status()
+            return response
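With the defaults (`backoff_seconds=0.5`, `max_retries=3`), the exponential backoff sleeps work out as follows:

```python
backoff_seconds, max_retries = 0.5, 3
delays = [backoff_seconds * (2 ** (attempt - 1)) for attempt in range(1, max_retries + 1)]
print(delays)  # [0.5, 1.0, 2.0]; a fourth failure propagates the error
```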
+
+
+@dataclass
+class ReceiptPage:
+    items: List[Dict[str, Any]]
+    next_cursor: Optional[str] = None
+    raw: Dict[str, Any] = field(default_factory=dict)
+
+
 def _verify_signature(payload: Dict[str, Any], signature: Dict[str, Any]) -> SignatureValidation:
     key_id = signature.get("key_id", "")
     verifier = ReceiptVerifier(_decode_key(key_id))
-    valid = verifier.verify(payload, signature)
-    return SignatureValidation(key_id=key_id, valid=valid)
+    try:
+        valid = verifier.verify(payload, signature)
+        reason: Optional[str] = None if valid else "signature mismatch"
+    except Exception as exc:  # pragma: no cover - verifier could raise on malformed payloads
+        valid = False
+        reason = str(exc) or "signature verification error"
+    algorithm = signature.get("algorithm") or "Ed25519"
+    return SignatureValidation(key_id=key_id, valid=valid, algorithm=algorithm, reason=reason)


 def verify_receipt(receipt: Dict[str, Any]) -> ReceiptVerification:
@ -4,12 +4,16 @@ from dataclasses import dataclass
|
|||||||
from typing import Dict, List, Optional
|
from typing import Dict, List, Optional
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
import httpx
|
||||||
from nacl.signing import SigningKey
|
from nacl.signing import SigningKey
|
||||||
|
|
||||||
from aitbc_crypto.signing import ReceiptSigner
|
from aitbc_crypto.signing import ReceiptSigner
|
||||||
|
|
||||||
from aitbc_sdk.receipts import (
|
from aitbc_sdk.receipts import (
|
||||||
CoordinatorReceiptClient,
|
CoordinatorReceiptClient,
|
||||||
|
ReceiptFailure,
|
||||||
|
ReceiptPage,
|
||||||
|
ReceiptStatus,
|
||||||
ReceiptVerification,
|
ReceiptVerification,
|
||||||
verify_receipt,
|
verify_receipt,
|
||||||
verify_receipts,
|
verify_receipts,
|
||||||
@ -53,6 +57,8 @@ def test_verify_receipt_success(sample_payload: Dict[str, object]) -> None:
|
|||||||
assert isinstance(result, ReceiptVerification)
|
assert isinstance(result, ReceiptVerification)
|
||||||
assert result.miner_signature.valid is True
|
assert result.miner_signature.valid is True
|
||||||
assert result.verified is True
|
assert result.verified is True
|
||||||
|
assert result.failure_reasons() == []
|
||||||
|
assert result.miner_signature.reason is None
|
||||||
|
|
||||||
|
|
||||||
def test_verify_receipt_failure(sample_payload: Dict[str, object]) -> None:
|
def test_verify_receipt_failure(sample_payload: Dict[str, object]) -> None:
|
||||||
@ -63,6 +69,8 @@ def test_verify_receipt_failure(sample_payload: Dict[str, object]) -> None:
|
|||||||
result = verify_receipt(receipt)
|
result = verify_receipt(receipt)
|
||||||
assert result.miner_signature.valid is False
|
assert result.miner_signature.valid is False
|
||||||
assert result.verified is False
|
assert result.verified is False
|
||||||
|
assert result.failure_reasons() == [f"miner_signature_invalid:{result.miner_signature.key_id}"]
|
||||||
|
assert result.miner_signature.reason == "signature mismatch"
|
||||||
|
|
||||||
|
|
||||||
def test_verify_receipts_batch(sample_payload: Dict[str, object]) -> None:
|
def test_verify_receipts_batch(sample_payload: Dict[str, object]) -> None:
|
||||||
@ -84,33 +92,20 @@ class _DummyResponse:
|
|||||||
|
|
||||||
def raise_for_status(self) -> None:
|
def raise_for_status(self) -> None:
|
||||||
if self.status_code >= 400:
|
if self.status_code >= 400:
|
||||||
raise Exception(f"HTTP {self.status_code}")
|
raise httpx.HTTPStatusError(
|
||||||
|
f"HTTP {self.status_code}", request=None, response=httpx.Response(self.status_code)
|
||||||
|
)
|
||||||
class _DummyClient:
|
|
||||||
def __init__(self, responses: List[_DummyResponse]):
|
|
||||||
self._responses = responses
|
|
||||||
|
|
||||||
def get(self, url: str, *args, **kwargs) -> _DummyResponse:
|
|
||||||
if not self._responses:
|
|
||||||
raise AssertionError("no more responses configured")
|
|
||||||
return self._responses.pop(0)
|
|
||||||
|
|
||||||
def __enter__(self) -> "_DummyClient":
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __exit__(self, exc_type, exc, tb) -> None:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def test_coordinator_receipt_client_latest(monkeypatch, sample_payload: Dict[str, object]) -> None:
|
def test_coordinator_receipt_client_latest(monkeypatch, sample_payload: Dict[str, object]) -> None:
|
||||||
signing_key = SigningKey.generate()
|
signing_key = SigningKey.generate()
|
||||||
receipt = _sign_receipt(sample_payload, signing_key)
|
receipt = _sign_receipt(sample_payload, signing_key)
|
||||||
|
|
||||||
def _mock_client(self) -> _DummyClient:
|
def _mock_request(self, method, url, params=None, allow_404=False):
|
||||||
return _DummyClient([_DummyResponse(200, receipt)])
|
assert method == "GET"
|
||||||
|
return _DummyResponse(200, receipt)
|
||||||
|
|
||||||
monkeypatch.setattr(CoordinatorReceiptClient, "_client", _mock_client)
|
monkeypatch.setattr(CoordinatorReceiptClient, "_request", _mock_request)
|
||||||
|
|
||||||
client = CoordinatorReceiptClient("https://coordinator", "api")
|
client = CoordinatorReceiptClient("https://coordinator", "api")
|
||||||
fetched = client.fetch_latest("job-123")
|
fetched = client.fetch_latest("job-123")
|
||||||
@ -121,10 +116,11 @@ def test_coordinator_receipt_client_history(monkeypatch, sample_payload: Dict[st
|
|||||||
signing_key = SigningKey.generate()
|
signing_key = SigningKey.generate()
|
||||||
receipts = [_sign_receipt(sample_payload, signing_key)]
|
receipts = [_sign_receipt(sample_payload, signing_key)]
|
||||||
|
|
||||||
def _mock_client(self) -> _DummyClient:
|
def _mock_request(self, method, url, params=None, allow_404=False):
|
||||||
return _DummyClient([_DummyResponse(200, {"items": receipts})])
|
assert method == "GET"
|
||||||
|
return _DummyResponse(200, {"items": receipts})
|
||||||
|
|
||||||
monkeypatch.setattr(CoordinatorReceiptClient, "_client", _mock_client)
|
monkeypatch.setattr(CoordinatorReceiptClient, "_request", _mock_request)
|
||||||
|
|
||||||
client = CoordinatorReceiptClient("https://coordinator", "api")
|
client = CoordinatorReceiptClient("https://coordinator", "api")
|
||||||
history = client.fetch_history("job-123")
|
history = client.fetch_history("job-123")
|
||||||
@@ -132,10 +128,153 @@ def test_coordinator_receipt_client_history(monkeypatch, sample_payload: Dict[st
 
 
 def test_coordinator_receipt_client_latest_404(monkeypatch) -> None:
-    def _mock_client(self) -> _DummyClient:
-        return _DummyClient([_DummyResponse(404, {})])
+    def _mock_request(self, method, url, params=None, allow_404=False):
+        assert allow_404 is True
+        return None
 
-    monkeypatch.setattr(CoordinatorReceiptClient, "_client", _mock_client)
+    monkeypatch.setattr(CoordinatorReceiptClient, "_request", _mock_request)
 
     client = CoordinatorReceiptClient("https://coordinator", "api")
     assert client.fetch_latest("job-missing") is None
+
+
+def test_fetch_receipts_page_list(monkeypatch, sample_payload: Dict[str, object]) -> None:
+    items = [_sign_receipt(sample_payload, SigningKey.generate())]
+
+    def _mock_request(self, method, url, params=None, allow_404=False):
+        return _DummyResponse(200, items)
+
+    monkeypatch.setattr(CoordinatorReceiptClient, "_request", _mock_request)
+
+    client = CoordinatorReceiptClient("https://coordinator", "api")
+    page = client.fetch_receipts_page(job_id="job-1")
+    assert isinstance(page, ReceiptPage)
+    assert page.items == items
+    assert page.next_cursor is None
+
+
+def test_fetch_receipts_page_dict_with_cursor(monkeypatch, sample_payload: Dict[str, object]) -> None:
+    signing_key = SigningKey.generate()
+    receipts = [_sign_receipt(sample_payload, signing_key)]
+    responses = [
+        _DummyResponse(200, {"items": receipts, "next_cursor": "cursor-1"}),
+        _DummyResponse(200, {"items": receipts, "next": None}),
+    ]
+
+    def _mock_request(self, method, url, params=None, allow_404=False):
+        assert method == "GET"
+        return responses.pop(0)
+
+    monkeypatch.setattr(CoordinatorReceiptClient, "_request", _mock_request)
+
+    client = CoordinatorReceiptClient("https://coordinator", "api")
+    first_page = client.fetch_receipts_page(job_id="job-1")
+    assert first_page.next_cursor == "cursor-1"
+    second_page = client.fetch_receipts_page(job_id="job-1", cursor=first_page.next_cursor)
+    assert second_page.next_cursor is None
+
+
+def test_iter_receipts_handles_pagination(monkeypatch, sample_payload: Dict[str, object]) -> None:
+    signing_key = SigningKey.generate()
+    receipt_a = _sign_receipt(sample_payload, signing_key)
+    receipt_b = _sign_receipt(sample_payload, signing_key)
+    responses = [
+        _DummyResponse(200, {"items": [receipt_a], "next_cursor": "cursor-2"}),
+        _DummyResponse(200, {"items": [receipt_b]}),
+    ]
+
+    def _mock_request(self, method, url, params=None, allow_404=False):
+        return responses.pop(0)
+
+    monkeypatch.setattr(CoordinatorReceiptClient, "_request", _mock_request)
+
+    client = CoordinatorReceiptClient("https://coordinator", "api")
+    collected = list(client.iter_receipts("job-123", page_size=1))
+    assert collected == [receipt_a, receipt_b]
+
+
+def test_request_retries_on_transient(monkeypatch, sample_payload: Dict[str, object]) -> None:
+    responses: List[object] = [
+        httpx.ReadTimeout("timeout"),
+        _DummyResponse(429, {}),
+        _DummyResponse(200, {}),
+    ]
+
+    class _RetryClient:
+        def __init__(self, shared: List[object]):
+            self._shared = shared
+
+        def request(self, method: str, url: str, params=None):
+            obj = self._shared.pop(0)
+            if isinstance(obj, Exception):
+                raise obj
+            return obj
+
+        def __enter__(self) -> "_RetryClient":
+            return self
+
+        def __exit__(self, exc_type, exc, tb) -> None:
+            pass
+
+    def _mock_client(self):
+        return _RetryClient(responses)
+
+    monkeypatch.setattr(CoordinatorReceiptClient, "_client", _mock_client)
+    monkeypatch.setattr("aitbc_sdk.receipts.time.sleep", lambda *_args: None)
+
+    client = CoordinatorReceiptClient("https://coordinator", "api", max_retries=3)
+    response = client._request("GET", "/v1/jobs/job-1/receipts")
+    assert isinstance(response, _DummyResponse)
+    assert response.status_code == 200
+
+
+def test_summarize_receipts_all_verified(monkeypatch, sample_payload: Dict[str, object]) -> None:
+    signing_key = SigningKey.generate()
+    receipts = [_sign_receipt(sample_payload, signing_key) for _ in range(2)]
+
+    def _fake_iter(self, job_id: str, page_size: int = 100):
+        yield from receipts
+
+    monkeypatch.setattr(CoordinatorReceiptClient, "iter_receipts", _fake_iter)
+
+    client = CoordinatorReceiptClient("https://coordinator", "api")
+    status = client.summarize_receipts("job-verified")
+
+    assert isinstance(status, ReceiptStatus)
+    assert status.total == 2
+    assert status.verified_count == 2
+    assert status.all_verified is True
+    assert status.has_failures is False
+    assert status.failure_reasons == {}
+    assert status.failures == []
+    assert isinstance(status.latest_verified, ReceiptVerification)
+
+
+def test_summarize_receipts_with_failures(monkeypatch, sample_payload: Dict[str, object]) -> None:
+    signing_key = SigningKey.generate()
+    good = _sign_receipt(sample_payload, signing_key)
+
+    bad = dict(good)
+    bad["metadata"] = {"job_payload": {"task": "tampered"}}
+
+    receipts = [good, bad]
+
+    def _fake_iter(self, job_id: str, page_size: int = 100):
+        yield from receipts
+
+    monkeypatch.setattr(CoordinatorReceiptClient, "iter_receipts", _fake_iter)
+
+    client = CoordinatorReceiptClient("https://coordinator", "api")
+    status = client.summarize_receipts("job-mixed")
+
+    assert status.total == 2
+    assert status.verified_count == 1
+    assert status.all_verified is False
+    assert status.has_failures is True
+    assert status.failure_reasons  # not empty
+    assert status.failure_reasons[next(iter(status.failure_reasons))] == 1
+    assert len(status.failures) == 1
+    failure = status.failures[0]
+    assert isinstance(failure, ReceiptFailure)
+    assert failure.reasons
+    assert failure.verification.miner_signature.valid is False
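
The pagination tests above tolerate two response shapes: a bare JSON list (a single, final page) and a dict carrying `items` plus an optional continuation cursor under `next_cursor` or `next`. The following is a minimal sketch of page parsing and iteration that would satisfy them, assuming the `ReceiptPage` container and the `fetch_receipts_page` signature used in the tests; the `limit` parameter name is an assumption.

from dataclasses import dataclass
from typing import Iterator, List, Optional


@dataclass
class ReceiptPage:
    items: List[dict]
    next_cursor: Optional[str]


def parse_page(payload) -> ReceiptPage:
    # A bare list is a single, final page; a dict may carry a continuation
    # cursor under either "next_cursor" or "next".
    if isinstance(payload, list):
        return ReceiptPage(items=payload, next_cursor=None)
    return ReceiptPage(
        items=payload.get("items", []),
        next_cursor=payload.get("next_cursor") or payload.get("next"),
    )


def iter_receipts(client, job_id: str, page_size: int = 100) -> Iterator[dict]:
    # Walk pages until the coordinator stops returning a cursor.
    cursor: Optional[str] = None
    while True:
        page = client.fetch_receipts_page(job_id=job_id, cursor=cursor, limit=page_size)
        yield from page.items
        if not page.next_cursor:
            break
        cursor = page.next_cursor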
@@ -0,0 +1,4 @@
{
  "_format": "hh-sol-dbg-1",
  "buildInfo": "../../../../build-info/294516b4613c993159fa474ac6e49583.json"
}
@@ -0,0 +1,236 @@
{
  "_format": "hh-sol-artifact-1",
  "contractName": "AccessControl",
  "sourceName": "@openzeppelin/contracts/access/AccessControl.sol",
  "abi": [
    { "inputs": [], "name": "AccessControlBadConfirmation", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "account", "type": "address" }, { "internalType": "bytes32", "name": "neededRole", "type": "bytes32" }], "name": "AccessControlUnauthorizedAccount", "type": "error" },
    { "anonymous": false, "inputs": [{ "indexed": true, "internalType": "bytes32", "name": "role", "type": "bytes32" }, { "indexed": true, "internalType": "bytes32", "name": "previousAdminRole", "type": "bytes32" }, { "indexed": true, "internalType": "bytes32", "name": "newAdminRole", "type": "bytes32" }], "name": "RoleAdminChanged", "type": "event" },
    { "anonymous": false, "inputs": [{ "indexed": true, "internalType": "bytes32", "name": "role", "type": "bytes32" }, { "indexed": true, "internalType": "address", "name": "account", "type": "address" }, { "indexed": true, "internalType": "address", "name": "sender", "type": "address" }], "name": "RoleGranted", "type": "event" },
    { "anonymous": false, "inputs": [{ "indexed": true, "internalType": "bytes32", "name": "role", "type": "bytes32" }, { "indexed": true, "internalType": "address", "name": "account", "type": "address" }, { "indexed": true, "internalType": "address", "name": "sender", "type": "address" }], "name": "RoleRevoked", "type": "event" },
    { "inputs": [], "name": "DEFAULT_ADMIN_ROLE", "outputs": [{ "internalType": "bytes32", "name": "", "type": "bytes32" }], "stateMutability": "view", "type": "function" },
    { "inputs": [{ "internalType": "bytes32", "name": "role", "type": "bytes32" }], "name": "getRoleAdmin", "outputs": [{ "internalType": "bytes32", "name": "", "type": "bytes32" }], "stateMutability": "view", "type": "function" },
    { "inputs": [{ "internalType": "bytes32", "name": "role", "type": "bytes32" }, { "internalType": "address", "name": "account", "type": "address" }], "name": "grantRole", "outputs": [], "stateMutability": "nonpayable", "type": "function" },
    { "inputs": [{ "internalType": "bytes32", "name": "role", "type": "bytes32" }, { "internalType": "address", "name": "account", "type": "address" }], "name": "hasRole", "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], "stateMutability": "view", "type": "function" },
    { "inputs": [{ "internalType": "bytes32", "name": "role", "type": "bytes32" }, { "internalType": "address", "name": "callerConfirmation", "type": "address" }], "name": "renounceRole", "outputs": [], "stateMutability": "nonpayable", "type": "function" },
    { "inputs": [{ "internalType": "bytes32", "name": "role", "type": "bytes32" }, { "internalType": "address", "name": "account", "type": "address" }], "name": "revokeRole", "outputs": [], "stateMutability": "nonpayable", "type": "function" },
    { "inputs": [{ "internalType": "bytes4", "name": "interfaceId", "type": "bytes4" }], "name": "supportsInterface", "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], "stateMutability": "view", "type": "function" }
  ],
  "bytecode": "0x",
  "deployedBytecode": "0x",
  "linkReferences": {},
  "deployedLinkReferences": {}
}
@@ -0,0 +1,4 @@
{
  "_format": "hh-sol-dbg-1",
  "buildInfo": "../../../../build-info/294516b4613c993159fa474ac6e49583.json"
}
@@ -0,0 +1,204 @@
{
  "_format": "hh-sol-artifact-1",
  "contractName": "IAccessControl",
  "sourceName": "@openzeppelin/contracts/access/IAccessControl.sol",
  "abi": [
    { "inputs": [], "name": "AccessControlBadConfirmation", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "account", "type": "address" }, { "internalType": "bytes32", "name": "neededRole", "type": "bytes32" }], "name": "AccessControlUnauthorizedAccount", "type": "error" },
    { "anonymous": false, "inputs": [{ "indexed": true, "internalType": "bytes32", "name": "role", "type": "bytes32" }, { "indexed": true, "internalType": "bytes32", "name": "previousAdminRole", "type": "bytes32" }, { "indexed": true, "internalType": "bytes32", "name": "newAdminRole", "type": "bytes32" }], "name": "RoleAdminChanged", "type": "event" },
    { "anonymous": false, "inputs": [{ "indexed": true, "internalType": "bytes32", "name": "role", "type": "bytes32" }, { "indexed": true, "internalType": "address", "name": "account", "type": "address" }, { "indexed": true, "internalType": "address", "name": "sender", "type": "address" }], "name": "RoleGranted", "type": "event" },
    { "anonymous": false, "inputs": [{ "indexed": true, "internalType": "bytes32", "name": "role", "type": "bytes32" }, { "indexed": true, "internalType": "address", "name": "account", "type": "address" }, { "indexed": true, "internalType": "address", "name": "sender", "type": "address" }], "name": "RoleRevoked", "type": "event" },
    { "inputs": [{ "internalType": "bytes32", "name": "role", "type": "bytes32" }], "name": "getRoleAdmin", "outputs": [{ "internalType": "bytes32", "name": "", "type": "bytes32" }], "stateMutability": "view", "type": "function" },
    { "inputs": [{ "internalType": "bytes32", "name": "role", "type": "bytes32" }, { "internalType": "address", "name": "account", "type": "address" }], "name": "grantRole", "outputs": [], "stateMutability": "nonpayable", "type": "function" },
    { "inputs": [{ "internalType": "bytes32", "name": "role", "type": "bytes32" }, { "internalType": "address", "name": "account", "type": "address" }], "name": "hasRole", "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], "stateMutability": "view", "type": "function" },
    { "inputs": [{ "internalType": "bytes32", "name": "role", "type": "bytes32" }, { "internalType": "address", "name": "callerConfirmation", "type": "address" }], "name": "renounceRole", "outputs": [], "stateMutability": "nonpayable", "type": "function" },
    { "inputs": [{ "internalType": "bytes32", "name": "role", "type": "bytes32" }, { "internalType": "address", "name": "account", "type": "address" }], "name": "revokeRole", "outputs": [], "stateMutability": "nonpayable", "type": "function" }
  ],
  "bytecode": "0x",
  "deployedBytecode": "0x",
  "linkReferences": {},
  "deployedLinkReferences": {}
}
@@ -0,0 +1,4 @@
{
  "_format": "hh-sol-dbg-1",
  "buildInfo": "../../../../build-info/294516b4613c993159fa474ac6e49583.json"
}
@@ -0,0 +1,113 @@
{
  "_format": "hh-sol-artifact-1",
  "contractName": "IERC1155Errors",
  "sourceName": "@openzeppelin/contracts/interfaces/draft-IERC6093.sol",
  "abi": [
    { "inputs": [{ "internalType": "address", "name": "sender", "type": "address" }, { "internalType": "uint256", "name": "balance", "type": "uint256" }, { "internalType": "uint256", "name": "needed", "type": "uint256" }, { "internalType": "uint256", "name": "tokenId", "type": "uint256" }], "name": "ERC1155InsufficientBalance", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "approver", "type": "address" }], "name": "ERC1155InvalidApprover", "type": "error" },
    { "inputs": [{ "internalType": "uint256", "name": "idsLength", "type": "uint256" }, { "internalType": "uint256", "name": "valuesLength", "type": "uint256" }], "name": "ERC1155InvalidArrayLength", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "operator", "type": "address" }], "name": "ERC1155InvalidOperator", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "receiver", "type": "address" }], "name": "ERC1155InvalidReceiver", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "sender", "type": "address" }], "name": "ERC1155InvalidSender", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "operator", "type": "address" }, { "internalType": "address", "name": "owner", "type": "address" }], "name": "ERC1155MissingApprovalForAll", "type": "error" }
  ],
  "bytecode": "0x",
  "deployedBytecode": "0x",
  "linkReferences": {},
  "deployedLinkReferences": {}
}
@@ -0,0 +1,4 @@
{
  "_format": "hh-sol-dbg-1",
  "buildInfo": "../../../../build-info/294516b4613c993159fa474ac6e49583.json"
}
@@ -0,0 +1,97 @@
{
  "_format": "hh-sol-artifact-1",
  "contractName": "IERC20Errors",
  "sourceName": "@openzeppelin/contracts/interfaces/draft-IERC6093.sol",
  "abi": [
    { "inputs": [{ "internalType": "address", "name": "spender", "type": "address" }, { "internalType": "uint256", "name": "allowance", "type": "uint256" }, { "internalType": "uint256", "name": "needed", "type": "uint256" }], "name": "ERC20InsufficientAllowance", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "sender", "type": "address" }, { "internalType": "uint256", "name": "balance", "type": "uint256" }, { "internalType": "uint256", "name": "needed", "type": "uint256" }], "name": "ERC20InsufficientBalance", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "approver", "type": "address" }], "name": "ERC20InvalidApprover", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "receiver", "type": "address" }], "name": "ERC20InvalidReceiver", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "sender", "type": "address" }], "name": "ERC20InvalidSender", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "spender", "type": "address" }], "name": "ERC20InvalidSpender", "type": "error" }
  ],
  "bytecode": "0x",
  "deployedBytecode": "0x",
  "linkReferences": {},
  "deployedLinkReferences": {}
}
@@ -0,0 +1,4 @@
{
  "_format": "hh-sol-dbg-1",
  "buildInfo": "../../../../build-info/294516b4613c993159fa474ac6e49583.json"
}
@@ -0,0 +1,114 @@
{
  "_format": "hh-sol-artifact-1",
  "contractName": "IERC721Errors",
  "sourceName": "@openzeppelin/contracts/interfaces/draft-IERC6093.sol",
  "abi": [
    { "inputs": [{ "internalType": "address", "name": "sender", "type": "address" }, { "internalType": "uint256", "name": "tokenId", "type": "uint256" }, { "internalType": "address", "name": "owner", "type": "address" }], "name": "ERC721IncorrectOwner", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "operator", "type": "address" }, { "internalType": "uint256", "name": "tokenId", "type": "uint256" }], "name": "ERC721InsufficientApproval", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "approver", "type": "address" }], "name": "ERC721InvalidApprover", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "operator", "type": "address" }], "name": "ERC721InvalidOperator", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "owner", "type": "address" }], "name": "ERC721InvalidOwner", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "receiver", "type": "address" }], "name": "ERC721InvalidReceiver", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "sender", "type": "address" }], "name": "ERC721InvalidSender", "type": "error" },
    { "inputs": [{ "internalType": "uint256", "name": "tokenId", "type": "uint256" }], "name": "ERC721NonexistentToken", "type": "error" }
  ],
  "bytecode": "0x",
  "deployedBytecode": "0x",
  "linkReferences": {},
  "deployedLinkReferences": {}
}
@@ -0,0 +1,4 @@
{
  "_format": "hh-sol-dbg-1",
  "buildInfo": "../../../../../build-info/294516b4613c993159fa474ac6e49583.json"
}
@@ -0,0 +1,319 @@
{
  "_format": "hh-sol-artifact-1",
  "contractName": "ERC20",
  "sourceName": "@openzeppelin/contracts/token/ERC20/ERC20.sol",
  "abi": [
    { "inputs": [{ "internalType": "address", "name": "spender", "type": "address" }, { "internalType": "uint256", "name": "allowance", "type": "uint256" }, { "internalType": "uint256", "name": "needed", "type": "uint256" }], "name": "ERC20InsufficientAllowance", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "sender", "type": "address" }, { "internalType": "uint256", "name": "balance", "type": "uint256" }, { "internalType": "uint256", "name": "needed", "type": "uint256" }], "name": "ERC20InsufficientBalance", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "approver", "type": "address" }], "name": "ERC20InvalidApprover", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "receiver", "type": "address" }], "name": "ERC20InvalidReceiver", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "sender", "type": "address" }], "name": "ERC20InvalidSender", "type": "error" },
    { "inputs": [{ "internalType": "address", "name": "spender", "type": "address" }], "name": "ERC20InvalidSpender", "type": "error" },
    { "anonymous": false, "inputs": [{ "indexed": true, "internalType": "address", "name": "owner", "type": "address" }, { "indexed": true, "internalType": "address", "name": "spender", "type": "address" }, { "indexed": false, "internalType": "uint256", "name": "value", "type": "uint256" }], "name": "Approval", "type": "event" },
    { "anonymous": false, "inputs": [{ "indexed": true, "internalType": "address", "name": "from", "type": "address" }, { "indexed": true, "internalType": "address", "name": "to", "type": "address" }, { "indexed": false, "internalType": "uint256", "name": "value", "type": "uint256" }], "name": "Transfer", "type": "event" },
    { "inputs": [{ "internalType": "address", "name": "owner", "type": "address" }, { "internalType": "address", "name": "spender", "type": "address" }], "name": "allowance", "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], "stateMutability": "view", "type": "function" },
    { "inputs": [{ "internalType": "address", "name": "spender", "type": "address" }, { "internalType": "uint256", "name": "value", "type": "uint256" }], "name": "approve", "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], "stateMutability": "nonpayable", "type": "function" },
    { "inputs": [{ "internalType": "address", "name": "account", "type": "address" }], "name": "balanceOf", "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], "stateMutability": "view", "type": "function" },
    { "inputs": [], "name": "decimals", "outputs": [{ "internalType": "uint8", "name": "", "type": "uint8" }], "stateMutability": "view", "type": "function" },
    { "inputs": [], "name": "name", "outputs": [{ "internalType": "string", "name": "", "type": "string" }], "stateMutability": "view", "type": "function" },
    { "inputs": [], "name": "symbol", "outputs": [{ "internalType": "string", "name": "", "type": "string" }], "stateMutability": "view", "type": "function" },
    { "inputs": [], "name": "totalSupply", "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], "stateMutability": "view", "type": "function" },
    { "inputs": [{ "internalType": "address", "name": "to", "type": "address" }, { "internalType": "uint256", "name": "value", "type": "uint256" }], "name": "transfer", "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], "stateMutability": "nonpayable", "type": "function" },
    { "inputs": [{ "internalType": "address", "name": "from", "type": "address" }, { "internalType": "address", "name": "to", "type": "address" }, { "internalType": "uint256", "name": "value", "type": "uint256" }], "name": "transferFrom", "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], "stateMutability": "nonpayable", "type": "function" }
  ],
  "bytecode": "0x",
  "deployedBytecode": "0x",
  "linkReferences": {},
  "deployedLinkReferences": {}
}
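
All of the artifacts in this commit except Panic and Strings ship with "bytecode": "0x": they describe interfaces or abstract contracts, so there is nothing to deploy, but the ABI is still useful for encoding calls against an existing deployment. A small stdlib-only example of pulling the ABI out of the ERC20 artifact above; the artifact path follows Hardhat's usual layout and is an assumption:

import json

# Hypothetical path following Hardhat's artifacts/<source>/<Contract>.json layout.
path = "artifacts/@openzeppelin/contracts/token/ERC20/ERC20.sol/ERC20.json"
with open(path) as fh:
    artifact = json.load(fh)

assert artifact["bytecode"] == "0x"  # ABI-only artifact: nothing to deploy
functions = sorted(e["name"] for e in artifact["abi"] if e["type"] == "function")
print(functions)  # ['allowance', 'approve', 'balanceOf', 'decimals', ...]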
@@ -0,0 +1,4 @@
{
  "_format": "hh-sol-dbg-1",
  "buildInfo": "../../../../../build-info/294516b4613c993159fa474ac6e49583.json"
}
@@ -0,0 +1,194 @@
{
  "_format": "hh-sol-artifact-1",
  "contractName": "IERC20",
  "sourceName": "@openzeppelin/contracts/token/ERC20/IERC20.sol",
  "abi": [
    { "anonymous": false, "inputs": [{ "indexed": true, "internalType": "address", "name": "owner", "type": "address" }, { "indexed": true, "internalType": "address", "name": "spender", "type": "address" }, { "indexed": false, "internalType": "uint256", "name": "value", "type": "uint256" }], "name": "Approval", "type": "event" },
    { "anonymous": false, "inputs": [{ "indexed": true, "internalType": "address", "name": "from", "type": "address" }, { "indexed": true, "internalType": "address", "name": "to", "type": "address" }, { "indexed": false, "internalType": "uint256", "name": "value", "type": "uint256" }], "name": "Transfer", "type": "event" },
    { "inputs": [{ "internalType": "address", "name": "owner", "type": "address" }, { "internalType": "address", "name": "spender", "type": "address" }], "name": "allowance", "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], "stateMutability": "view", "type": "function" },
    { "inputs": [{ "internalType": "address", "name": "spender", "type": "address" }, { "internalType": "uint256", "name": "value", "type": "uint256" }], "name": "approve", "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], "stateMutability": "nonpayable", "type": "function" },
    { "inputs": [{ "internalType": "address", "name": "account", "type": "address" }], "name": "balanceOf", "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], "stateMutability": "view", "type": "function" },
    { "inputs": [], "name": "totalSupply", "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], "stateMutability": "view", "type": "function" },
    { "inputs": [{ "internalType": "address", "name": "to", "type": "address" }, { "internalType": "uint256", "name": "value", "type": "uint256" }], "name": "transfer", "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], "stateMutability": "nonpayable", "type": "function" },
    { "inputs": [{ "internalType": "address", "name": "from", "type": "address" }, { "internalType": "address", "name": "to", "type": "address" }, { "internalType": "uint256", "name": "value", "type": "uint256" }], "name": "transferFrom", "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], "stateMutability": "nonpayable", "type": "function" }
  ],
  "bytecode": "0x",
  "deployedBytecode": "0x",
  "linkReferences": {},
  "deployedLinkReferences": {}
}
@@ -0,0 +1,4 @@
{
  "_format": "hh-sol-dbg-1",
  "buildInfo": "../../../../../../build-info/294516b4613c993159fa474ac6e49583.json"
}
@@ -0,0 +1,233 @@
{
  "_format": "hh-sol-artifact-1",
  "contractName": "IERC20Metadata",
  "sourceName": "@openzeppelin/contracts/token/ERC20/extensions/IERC20Metadata.sol",
  "abi": [
    { "anonymous": false, "inputs": [{ "indexed": true, "internalType": "address", "name": "owner", "type": "address" }, { "indexed": true, "internalType": "address", "name": "spender", "type": "address" }, { "indexed": false, "internalType": "uint256", "name": "value", "type": "uint256" }], "name": "Approval", "type": "event" },
    { "anonymous": false, "inputs": [{ "indexed": true, "internalType": "address", "name": "from", "type": "address" }, { "indexed": true, "internalType": "address", "name": "to", "type": "address" }, { "indexed": false, "internalType": "uint256", "name": "value", "type": "uint256" }], "name": "Transfer", "type": "event" },
    { "inputs": [{ "internalType": "address", "name": "owner", "type": "address" }, { "internalType": "address", "name": "spender", "type": "address" }], "name": "allowance", "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], "stateMutability": "view", "type": "function" },
    { "inputs": [{ "internalType": "address", "name": "spender", "type": "address" }, { "internalType": "uint256", "name": "value", "type": "uint256" }], "name": "approve", "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], "stateMutability": "nonpayable", "type": "function" },
    { "inputs": [{ "internalType": "address", "name": "account", "type": "address" }], "name": "balanceOf", "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], "stateMutability": "view", "type": "function" },
    { "inputs": [], "name": "decimals", "outputs": [{ "internalType": "uint8", "name": "", "type": "uint8" }], "stateMutability": "view", "type": "function" },
    { "inputs": [], "name": "name", "outputs": [{ "internalType": "string", "name": "", "type": "string" }], "stateMutability": "view", "type": "function" },
    { "inputs": [], "name": "symbol", "outputs": [{ "internalType": "string", "name": "", "type": "string" }], "stateMutability": "view", "type": "function" },
    { "inputs": [], "name": "totalSupply", "outputs": [{ "internalType": "uint256", "name": "", "type": "uint256" }], "stateMutability": "view", "type": "function" },
    { "inputs": [{ "internalType": "address", "name": "to", "type": "address" }, { "internalType": "uint256", "name": "value", "type": "uint256" }], "name": "transfer", "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], "stateMutability": "nonpayable", "type": "function" },
    { "inputs": [{ "internalType": "address", "name": "from", "type": "address" }, { "internalType": "address", "name": "to", "type": "address" }, { "internalType": "uint256", "name": "value", "type": "uint256" }], "name": "transferFrom", "outputs": [{ "internalType": "bool", "name": "", "type": "bool" }], "stateMutability": "nonpayable", "type": "function" }
  ],
  "bytecode": "0x",
  "deployedBytecode": "0x",
  "linkReferences": {},
  "deployedLinkReferences": {}
}
@@ -0,0 +1,4 @@
{
  "_format": "hh-sol-dbg-1",
  "buildInfo": "../../../../build-info/294516b4613c993159fa474ac6e49583.json"
}
@@ -0,0 +1,10 @@
{
  "_format": "hh-sol-artifact-1",
  "contractName": "Context",
  "sourceName": "@openzeppelin/contracts/utils/Context.sol",
  "abi": [],
  "bytecode": "0x",
  "deployedBytecode": "0x",
  "linkReferences": {},
  "deployedLinkReferences": {}
}
@@ -0,0 +1,4 @@
{
  "_format": "hh-sol-dbg-1",
  "buildInfo": "../../../../build-info/294516b4613c993159fa474ac6e49583.json"
}
@@ -0,0 +1,10 @@
{
  "_format": "hh-sol-artifact-1",
  "contractName": "Panic",
  "sourceName": "@openzeppelin/contracts/utils/Panic.sol",
  "abi": [],
  "bytecode": "0x60566037600b82828239805160001a607314602a57634e487b7160e01b600052600060045260246000fd5b30600052607381538281f3fe73000000000000000000000000000000000000000030146080604052600080fdfea2646970667358221220fec73f632edd47d0e7ae9933b51e8f4da78eb465818c4263bebf6623ab05aff164736f6c63430008180033",
  "deployedBytecode": "0x73000000000000000000000000000000000000000030146080604052600080fdfea2646970667358221220fec73f632edd47d0e7ae9933b51e8f4da78eb465818c4263bebf6623ab05aff164736f6c63430008180033",
  "linkReferences": {},
  "deployedLinkReferences": {}
}
@@ -0,0 +1,4 @@
{
  "_format": "hh-sol-dbg-1",
  "buildInfo": "../../../../build-info/294516b4613c993159fa474ac6e49583.json"
}
@@ -0,0 +1,37 @@
{
  "_format": "hh-sol-artifact-1",
  "contractName": "Strings",
  "sourceName": "@openzeppelin/contracts/utils/Strings.sol",
  "abi": [
    { "inputs": [{ "internalType": "uint256", "name": "value", "type": "uint256" }, { "internalType": "uint256", "name": "length", "type": "uint256" }], "name": "StringsInsufficientHexLength", "type": "error" },
    { "inputs": [], "name": "StringsInvalidAddressFormat", "type": "error" },
    { "inputs": [], "name": "StringsInvalidChar", "type": "error" }
  ],
  "bytecode": "0x60566037600b82828239805160001a607314602a57634e487b7160e01b600052600060045260246000fd5b30600052607381538281f3fe73000000000000000000000000000000000000000030146080604052600080fdfea2646970667358221220d3fa6b95cf4f76e64227a9b2373ccb228efd9715fd7983e0646867999cceb9fb64736f6c63430008180033",
  "deployedBytecode": "0x73000000000000000000000000000000000000000030146080604052600080fdfea2646970667358221220d3fa6b95cf4f76e64227a9b2373ccb228efd9715fd7983e0646867999cceb9fb64736f6c63430008180033",
  "linkReferences": {},
  "deployedLinkReferences": {}
}
@@ -0,0 +1,4 @@
{
  "_format": "hh-sol-dbg-1",
  "buildInfo": "../../../../../build-info/294516b4613c993159fa474ac6e49583.json"
}
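
Each *.dbg.json file in this commit is a two-field pointer from its artifact to the shared build-info blob (294516b4613c993159fa474ac6e49583.json) that holds the full compiler input and output for the run, which is why the relative paths vary in depth with the source file's nesting. A short sketch of resolving that pointer, with an assumed artifact path:

import json
from pathlib import Path

# Hypothetical dbg path; the "buildInfo" field is relative to the dbg file itself.
dbg_path = Path("artifacts/@openzeppelin/contracts/utils/Strings.sol/Strings.dbg.json")
dbg = json.loads(dbg_path.read_text())
build_info = (dbg_path.parent / dbg["buildInfo"]).resolve()
print(build_info)  # .../artifacts/build-info/294516b4613c993159fa474ac6e49583.json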