feat: add foreign key constraints and metrics for blockchain node

Author: oib
Date: 2025-09-28 06:04:30 +02:00
parent c1926136fb
commit fb60505cdf
189 changed files with 15678 additions and 158 deletions
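
The hunks shown below are only the new Pool Hub integration tests; the foreign-key and metrics changes named in the commit title live in model and application modules that are not part of this excerpt. Purely as an illustration of the kind of constraint the title refers to, a SQLAlchemy 2.0 foreign-key declaration might look like the sketch below; the table and column names are assumptions, not taken from this commit.

from sqlalchemy import ForeignKey
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

class Base(DeclarativeBase):
    pass

class MatchResult(Base):
    __tablename__ = "match_results"

    id: Mapped[int] = mapped_column(primary_key=True)
    # Hypothetical foreign keys; the real poolhub.models definitions are not shown in this diff.
    request_id: Mapped[int] = mapped_column(ForeignKey("match_requests.id", ondelete="CASCADE"))
    miner_id: Mapped[str] = mapped_column(ForeignKey("miners.miner_id", ondelete="CASCADE"))
    score: Mapped[float] = mapped_column(default=0.0)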

@@ -0,0 +1,63 @@
from __future__ import annotations

import os
import sys
from pathlib import Path

import pytest
import pytest_asyncio
from redis.asyncio import Redis
from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine

# Make the pool-hub sources importable before pulling in poolhub.models.
BASE_DIR = Path(__file__).resolve().parents[2]
POOLHUB_SRC = BASE_DIR / "pool-hub" / "src"
if str(POOLHUB_SRC) not in sys.path:
    sys.path.insert(0, str(POOLHUB_SRC))

from poolhub.models import Base


def _get_required_env(name: str) -> str:
    value = os.getenv(name)
    if not value:
        pytest.skip(f"Set {name} to run Pool Hub integration tests")
    return value


@pytest_asyncio.fixture()
async def db_engine() -> AsyncEngine:
    dsn = _get_required_env("POOLHUB_TEST_POSTGRES_DSN")
    engine = create_async_engine(dsn, pool_pre_ping=True)
    # Recreate the schema before the test and drop it again afterwards.
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.drop_all)
        await conn.run_sync(Base.metadata.create_all)
    yield engine
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.drop_all)
    await engine.dispose()


@pytest_asyncio.fixture
async def db_session(db_engine: AsyncEngine) -> AsyncSession:
    session_factory = async_sessionmaker(db_engine, expire_on_commit=False, autoflush=False)
    async with session_factory() as session:
        yield session
        await session.rollback()


@pytest_asyncio.fixture()
async def redis_client() -> Redis:
    redis_url = _get_required_env("POOLHUB_TEST_REDIS_URL")
    client = Redis.from_url(redis_url, encoding="utf-8", decode_responses=True)
    await client.flushdb()
    yield client
    await client.flushdb()
    await client.close()


@pytest_asyncio.fixture(autouse=True)
async def _clear_redis(redis_client: Redis) -> None:
    await redis_client.flushdb()
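
Both fixtures skip unless POOLHUB_TEST_POSTGRES_DSN and POOLHUB_TEST_REDIS_URL point at disposable services. A minimal local runner might look like the sketch below; the connection strings and test path are placeholders, not values from this commit.

import os

import pytest

# Placeholder DSN/URL for throwaway local Postgres and Redis instances.
os.environ.setdefault("POOLHUB_TEST_POSTGRES_DSN", "postgresql+asyncpg://pool:pool@localhost:5432/poolhub_test")
os.environ.setdefault("POOLHUB_TEST_REDIS_URL", "redis://localhost:6379/1")

raise SystemExit(pytest.main(["tests/integration", "-q"]))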

@@ -0,0 +1,153 @@
from __future__ import annotations

import uuid

import pytest
import pytest_asyncio
from httpx import AsyncClient
from sqlalchemy.ext.asyncio import async_sessionmaker

from poolhub.app import deps
from poolhub.app.main import create_app
from poolhub.app.prometheus import reset_metrics
from poolhub.repositories.miner_repository import MinerRepository


@pytest_asyncio.fixture()
async def async_client(db_engine, redis_client):  # noqa: F811
    async def _session_override():
        factory = async_sessionmaker(db_engine, expire_on_commit=False, autoflush=False)
        async with factory() as session:
            yield session

    async def _redis_override():
        yield redis_client

    # Point the app's DB and Redis dependencies at the test fixtures and start
    # every test from zeroed Prometheus counters.
    app = create_app()
    app.dependency_overrides.clear()
    app.dependency_overrides[deps.db_session_dep] = _session_override
    app.dependency_overrides[deps.redis_dep] = _redis_override
    reset_metrics()
    async with AsyncClient(app=app, base_url="http://testserver") as client:
        yield client
    app.dependency_overrides.clear()
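
# Note: newer httpx releases removed the `app=` shortcut used above. If the pinned
# httpx version rejects it, the same fixture can build the client over an explicit
# ASGI transport instead (a sketch, not part of this commit):
#
#     from httpx import ASGITransport
#
#     async with AsyncClient(transport=ASGITransport(app=app), base_url="http://testserver") as client:
#         yield client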


@pytest.mark.asyncio
async def test_match_endpoint(async_client, db_session, redis_client):  # noqa: F811
    repo = MinerRepository(db_session, redis_client)
    await repo.register_miner(
        miner_id="miner-1",
        api_key_hash="hash",
        addr="127.0.0.1",
        proto="grpc",
        gpu_vram_gb=16,
        gpu_name="A100",
        cpu_cores=32,
        ram_gb=128,
        max_parallel=4,
        base_price=0.8,
        tags={"tier": "gold"},
        capabilities=["embedding"],
        region="eu",
    )
    await db_session.commit()

    response = await async_client.post(
        "/v1/match",
        json={
            "job_id": "job-123",
            "requirements": {"min_vram_gb": 8},
            "hints": {"region": "eu"},
            "top_k": 1,
        },
    )

    assert response.status_code == 200
    payload = response.json()
    assert payload["job_id"] == "job-123"
    assert len(payload["candidates"]) == 1


@pytest.mark.asyncio
async def test_match_endpoint_no_miners(async_client):
    response = await async_client.post(
        "/v1/match",
        json={"job_id": "empty", "requirements": {}, "hints": {}, "top_k": 2},
    )

    assert response.status_code == 200
    payload = response.json()
    assert payload["candidates"] == []


@pytest.mark.asyncio
async def test_health_endpoint(async_client):  # noqa: F811
    response = await async_client.get("/v1/health")

    assert response.status_code == 200
    data = response.json()
    assert data["status"] in {"ok", "degraded"}
    assert "db_error" in data
    assert "redis_error" in data


@pytest.mark.asyncio
async def test_health_endpoint_degraded(db_engine, redis_client):  # noqa: F811
    async def _session_override():
        factory = async_sessionmaker(db_engine, expire_on_commit=False, autoflush=False)
        async with factory() as session:
            yield session

    class FailingRedis:
        async def ping(self) -> None:
            raise RuntimeError("redis down")

        def __getattr__(self, _: str) -> None:  # pragma: no cover - minimal stub
            raise RuntimeError("redis down")

    async def _redis_override():
        yield FailingRedis()

    app = create_app()
    app.dependency_overrides.clear()
    app.dependency_overrides[deps.db_session_dep] = _session_override
    app.dependency_overrides[deps.redis_dep] = _redis_override
    reset_metrics()

    async with AsyncClient(app=app, base_url="http://testserver") as client:
        response = await client.get("/v1/health")

    assert response.status_code == 200
    payload = response.json()
    assert payload["status"] == "degraded"
    assert payload["redis_error"]
    assert payload["db_error"] is None
    app.dependency_overrides.clear()


@pytest.mark.asyncio
async def test_metrics_endpoint(async_client):
    baseline = await async_client.get("/metrics")
    before = _extract_counter(baseline.text, "poolhub_match_requests_total")

    for _ in range(2):
        await async_client.post(
            "/v1/match",
            json={"job_id": str(uuid.uuid4()), "requirements": {}, "hints": {}, "top_k": 1},
        )

    updated = await async_client.get("/metrics")
    after = _extract_counter(updated.text, "poolhub_match_requests_total")
    assert after >= before + 2


def _extract_counter(metrics_text: str, metric: str) -> float:
    for line in metrics_text.splitlines():
        if line.startswith(metric):
            parts = line.split()
            if len(parts) >= 2:
                try:
                    return float(parts[1])
                except ValueError:  # pragma: no cover
                    return 0.0
    return 0.0
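
The prefix-based _extract_counter above is fine for an unlabelled counter; if poolhub_match_requests_total ever gains labels, the official text parser is a sturdier option. A sketch, assuming prometheus_client is available in the test environment (it is not imported anywhere in this excerpt):

from prometheus_client.parser import text_string_to_metric_families


def extract_counter_total(metrics_text: str, metric: str) -> float:
    # Sum every sample that matches the metric name, regardless of label set.
    total = 0.0
    for family in text_string_to_metric_families(metrics_text):
        for sample in family.samples:
            if sample.name == metric:
                total += sample.value
    return total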

@@ -0,0 +1,96 @@
from __future__ import annotations

import json
import uuid

import pytest

from poolhub.repositories.feedback_repository import FeedbackRepository
from poolhub.repositories.match_repository import MatchRepository
from poolhub.repositories.miner_repository import MinerRepository
from poolhub.storage.redis_keys import RedisKeys


@pytest.mark.asyncio
async def test_register_miner_persists_and_syncs(db_session, redis_client):
    repo = MinerRepository(db_session, redis_client)
    await repo.register_miner(
        miner_id="miner-1",
        api_key_hash="hash",
        addr="127.0.0.1",
        proto="grpc",
        gpu_vram_gb=16,
        gpu_name="A100",
        cpu_cores=32,
        ram_gb=128,
        max_parallel=4,
        base_price=0.8,
        tags={"tier": "gold"},
        capabilities=["embedding"],
        region="eu",
    )

    miner = await repo.get_miner("miner-1")
    assert miner is not None
    assert miner.addr == "127.0.0.1"

    # Registration is mirrored into Redis: a per-miner hash plus a per-region ranking sorted set.
    redis_hash = await redis_client.hgetall(RedisKeys.miner_hash("miner-1"))
    assert redis_hash["miner_id"] == "miner-1"
    ranking = await redis_client.zscore(RedisKeys.miner_rankings("eu"), "miner-1")
    assert ranking is not None
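
# The per-region ranking sorted set keeps one score per miner, so a matcher could,
# for example, pull the top-k candidates with a reverse range query. Illustration
# only, not code from this commit:
#
#     top = await redis_client.zrevrange(RedisKeys.miner_rankings("eu"), 0, 4, withscores=True)
#     # -> [("miner-1", 0.93), ...] with the highest score first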


@pytest.mark.asyncio
async def test_match_request_flow(db_session, redis_client):
    match_repo = MatchRepository(db_session, redis_client)
    req = await match_repo.create_request(
        job_id="job-123",
        requirements={"min_vram_gb": 8},
        hints={"region": "eu"},
        top_k=2,
    )
    await db_session.commit()

    # create_request is expected to enqueue the request in Redis as well.
    queue_entry = await redis_client.lpop(RedisKeys.match_requests())
    assert queue_entry is not None
    payload = json.loads(queue_entry)
    assert payload["job_id"] == "job-123"

    await match_repo.add_results(
        request_id=req.id,
        candidates=[
            {"miner_id": "miner-1", "score": 0.9, "explain": "fit"},
            {"miner_id": "miner-2", "score": 0.8, "explain": "backup"},
        ],
    )
    await db_session.commit()

    results = await match_repo.list_results_for_job("job-123")
    assert len(results) == 2
    redis_results = await redis_client.lrange(RedisKeys.match_results("job-123"), 0, -1)
    assert len(redis_results) == 2


@pytest.mark.asyncio
async def test_feedback_repository(db_session, redis_client):
    feedback_repo = FeedbackRepository(db_session, redis_client)
    feedback = await feedback_repo.add_feedback(
        job_id="job-321",
        miner_id="miner-1",
        outcome="completed",
        latency_ms=1200,
        tokens_spent=1.5,
    )
    await db_session.commit()

    rows = await feedback_repo.list_feedback_for_job("job-321")
    assert len(rows) == 1
    assert rows[0].outcome == "completed"
    # Redis does not buffer publishes for later inspection, so the Redis side of
    # add_feedback is only smoke-checked here via the returned object.
    assert feedback.miner_id == "miner-1"
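
If the Redis publish itself ever needs to be asserted, a subscriber has to be attached before add_feedback runs. A sketch of that pattern with redis.asyncio follows; the channel name is an assumption, not something defined in this commit.

import pytest

from poolhub.repositories.feedback_repository import FeedbackRepository


@pytest.mark.asyncio
async def test_feedback_publish(db_session, redis_client):
    pubsub = redis_client.pubsub()
    await pubsub.subscribe("poolhub:feedback")  # assumed channel name
    try:
        repo = FeedbackRepository(db_session, redis_client)
        await repo.add_feedback(
            job_id="job-654",
            miner_id="miner-1",
            outcome="completed",
            latency_ms=800,
            tokens_spent=0.5,
        )
        await db_session.commit()
        message = await pubsub.get_message(ignore_subscribe_messages=True, timeout=1.0)
        assert message is not None and message["type"] == "message"
    finally:
        await pubsub.unsubscribe("poolhub:feedback")
        await pubsub.close()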