feat: add transaction hash search to blockchain explorer and cleanup settlement storage

Blockchain Explorer:
- Add transaction hash search support (64-char hex pattern validation)
- Fetch and display transaction details in modal (hash, type, from/to, amount, fee, block)
- Fix regex escape sequence in block height validation
- Update search placeholder text to mention both search types
- Add blank lines between function definitions for PEP 8 compliance

Settlement Storage:
- Add timedelta import for future use
This commit is contained in:
oib
2026-02-17 14:34:12 +01:00
parent 31d3d70836
commit 421191ccaf
34 changed files with 2176 additions and 5660 deletions

View File

@@ -9,6 +9,7 @@ import asyncio
@dataclass
class MinerInfo:
"""Miner information"""
miner_id: str
pool_id: str
capabilities: List[str]
@@ -30,6 +31,7 @@ class MinerInfo:
@dataclass
class PoolInfo:
"""Pool information"""
pool_id: str
name: str
description: Optional[str]
@@ -47,6 +49,7 @@ class PoolInfo:
@dataclass
class JobAssignment:
"""Job assignment record"""
job_id: str
miner_id: str
pool_id: str
@@ -59,13 +62,13 @@ class JobAssignment:
class MinerRegistry:
"""Registry for managing miners and pools"""
def __init__(self):
# In-memory stores only (plain dicts); all state is lost on restart.
# Mutating methods take self._lock; read-only lookups do not.
self._miners: Dict[str, MinerInfo] = {}
self._pools: Dict[str, PoolInfo] = {}
self._jobs: Dict[str, JobAssignment] = {}
self._lock = asyncio.Lock()
async def register(
self,
miner_id: str,
@@ -73,45 +76,45 @@ class MinerRegistry:
capabilities: List[str],
gpu_info: Dict[str, Any],
endpoint: Optional[str] = None,
max_concurrent_jobs: int = 1
max_concurrent_jobs: int = 1,
) -> MinerInfo:
"""Register a new miner."""
async with self._lock:
if miner_id in self._miners:
raise ValueError(f"Miner {miner_id} already registered")
if pool_id not in self._pools:
raise ValueError(f"Pool {pool_id} not found")
miner = MinerInfo(
miner_id=miner_id,
pool_id=pool_id,
capabilities=capabilities,
gpu_info=gpu_info,
endpoint=endpoint,
max_concurrent_jobs=max_concurrent_jobs
max_concurrent_jobs=max_concurrent_jobs,
)
self._miners[miner_id] = miner
self._pools[pool_id].miner_count += 1
return miner
async def get(self, miner_id: str) -> Optional["MinerInfo"]:
    """Return the MinerInfo registered under *miner_id*, or None if unknown."""
    try:
        return self._miners[miner_id]
    except KeyError:
        return None
async def list(
self,
pool_id: Optional[str] = None,
status: Optional[str] = None,
capability: Optional[str] = None,
exclude_miner: Optional[str] = None,
limit: int = 50
limit: int = 50,
) -> List[MinerInfo]:
"""List miners with filters."""
miners = list(self._miners.values())
if pool_id:
miners = [m for m in miners if m.pool_id == pool_id]
if status:
@@ -120,16 +123,16 @@ class MinerRegistry:
miners = [m for m in miners if capability in m.capabilities]
if exclude_miner:
miners = [m for m in miners if m.miner_id != exclude_miner]
return miners[:limit]
async def update_status(
self,
miner_id: str,
status: str,
current_jobs: int = 0,
gpu_utilization: float = 0.0,
memory_used_gb: float = 0.0
memory_used_gb: float = 0.0,
):
"""Update miner status."""
async with self._lock:
@@ -140,13 +143,13 @@ class MinerRegistry:
miner.gpu_utilization = gpu_utilization
miner.memory_used_gb = memory_used_gb
miner.last_heartbeat = datetime.utcnow()
async def update_capabilities(self, miner_id: str, capabilities: List[str]):
    """Replace a registered miner's capability list; no-op for unknown IDs."""
    async with self._lock:
        miner = self._miners.get(miner_id)
        if miner is not None:
            miner.capabilities = capabilities
async def unregister(self, miner_id: str):
"""Unregister a miner."""
async with self._lock:
@@ -155,7 +158,7 @@ class MinerRegistry:
del self._miners[miner_id]
if pool_id in self._pools:
self._pools[pool_id].miner_count -= 1
# Pool management
async def create_pool(
self,
@@ -165,13 +168,13 @@ class MinerRegistry:
description: Optional[str] = None,
fee_percent: float = 1.0,
min_payout: float = 10.0,
payout_schedule: str = "daily"
payout_schedule: str = "daily",
) -> PoolInfo:
"""Create a new pool."""
async with self._lock:
if pool_id in self._pools:
raise ValueError(f"Pool {pool_id} already exists")
pool = PoolInfo(
pool_id=pool_id,
name=name,
@@ -179,42 +182,46 @@ class MinerRegistry:
operator=operator,
fee_percent=fee_percent,
min_payout=min_payout,
payout_schedule=payout_schedule
payout_schedule=payout_schedule,
)
self._pools[pool_id] = pool
return pool
async def get_pool(self, pool_id: str) -> Optional["PoolInfo"]:
    """Fetch a pool record by its identifier; None when absent."""
    try:
        return self._pools[pool_id]
    except KeyError:
        return None
async def list_pools(self, limit: int = 50, offset: int = 0) -> List["PoolInfo"]:
    """List registered pools.

    Args:
        limit: Maximum number of pools to return.
        offset: Number of pools to skip from the start.

    Returns:
        At most *limit* pools, in insertion order.
    """
    pools = list(self._pools.values())
    # Single slice; the rendered source carried two consecutive return
    # statements (old + new diff lines), the second unreachable.
    return pools[offset : offset + limit]
async def get_pool_stats(self, pool_id: str) -> Dict[str, Any]:
    """Aggregate statistics for one pool.

    Args:
        pool_id: Pool to summarize.

    Returns:
        A stats dict, or an empty dict when the pool does not exist.
    """
    pool = self._pools.get(pool_id)
    if not pool:
        return {}
    miners = await self.list(pool_id=pool_id)
    active = [m for m in miners if m.status == "available"]
    # Hoisted: used both as a stat and as the response-time divisor.
    total_jobs = sum(m.jobs_completed for m in miners)
    return {
        "pool_id": pool_id,
        "miner_count": len(miners),
        "active_miners": len(active),
        "total_jobs": total_jobs,
        "jobs_24h": pool.jobs_completed_24h,
        # Estimate: project 24h earnings to a monthly figure (24h * 30).
        "total_earnings": pool.earnings_24h * 30,
        "earnings_24h": pool.earnings_24h,
        # Estimate: assume ~500ms average per completed job.
        "avg_response_time_ms": sum(m.jobs_completed * 500 for m in miners)
        / max(total_jobs, 1),
        "uptime_percent": sum(m.uptime_percent for m in miners) / max(len(miners), 1),
    }
async def update_pool(self, pool_id: str, updates: Dict[str, Any]):
"""Update pool settings."""
async with self._lock:
@@ -223,48 +230,41 @@ class MinerRegistry:
for key, value in updates.items():
if hasattr(pool, key):
setattr(pool, key, value)
async def delete_pool(self, pool_id: str):
    """Remove a pool from the registry; unknown IDs are silently ignored."""
    async with self._lock:
        # pop() with a default folds the membership check into the removal.
        self._pools.pop(pool_id, None)
# Job management
async def assign_job(
    self, job_id: str, miner_id: str, deadline: Optional[datetime] = None
) -> "JobAssignment":
    """Assign a job to a miner and record the assignment.

    Args:
        job_id: Identifier of the job being assigned.
        miner_id: Target miner; must already be registered.
        deadline: Optional completion deadline.

    Returns:
        The stored JobAssignment record.

    Raises:
        ValueError: If the miner is not registered.
    """
    # The rendered source carried both the old multi-line and the new
    # one-line signature (duplicate parameters); reconstructed to one.
    async with self._lock:
        miner = self._miners.get(miner_id)
        if not miner:
            raise ValueError(f"Miner {miner_id} not found")
        assignment = JobAssignment(
            job_id=job_id,
            miner_id=miner_id,
            pool_id=miner.pool_id,
            model="",  # Set by caller
            deadline=deadline,
        )
        self._jobs[job_id] = assignment
        miner.current_jobs += 1
        # Mark the miner busy once it reaches its concurrency cap.
        if miner.current_jobs >= miner.max_concurrent_jobs:
            miner.status = "busy"
        return assignment
async def complete_job(
self,
job_id: str,
miner_id: str,
status: str,
metrics: Dict[str, Any] = None
self, job_id: str, miner_id: str, status: str, metrics: Dict[str, Any] = None
):
"""Mark a job as complete."""
async with self._lock:
@@ -272,52 +272,50 @@ class MinerRegistry:
job = self._jobs[job_id]
job.status = status
job.completed_at = datetime.utcnow()
if miner_id in self._miners:
miner = self._miners[miner_id]
miner.current_jobs = max(0, miner.current_jobs - 1)
if status == "completed":
miner.jobs_completed += 1
else:
miner.jobs_failed += 1
if miner.current_jobs < miner.max_concurrent_jobs:
miner.status = "available"
async def get_job(self, job_id: str) -> Optional["JobAssignment"]:
    """Return the JobAssignment recorded for *job_id*, or None if unknown."""
    try:
        return self._jobs[job_id]
    except KeyError:
        return None
async def get_pending_jobs(
    self, pool_id: Optional[str] = None, limit: int = 50
) -> List["JobAssignment"]:
    """Return jobs still in the "assigned" state.

    Args:
        pool_id: When given, restrict results to this pool.
        limit: Maximum number of jobs to return.

    Returns:
        Up to *limit* pending JobAssignment records.
    """
    # The rendered source carried both old and new signature lines
    # (duplicate parameters); reconstructed to the new one-line form.
    jobs = [j for j in self._jobs.values() if j.status == "assigned"]
    if pool_id:
        jobs = [j for j in jobs if j.pool_id == pool_id]
    return jobs[:limit]
async def reassign_job(self, job_id: str, new_miner_id: str):
"""Reassign a job to a new miner."""
async with self._lock:
if job_id not in self._jobs:
raise ValueError(f"Job {job_id} not found")
job = self._jobs[job_id]
old_miner_id = job.miner_id
# Update old miner
if old_miner_id in self._miners:
self._miners[old_miner_id].current_jobs -= 1
# Update job
job.miner_id = new_miner_id
job.status = "assigned"
job.assigned_at = datetime.utcnow()
# Update new miner
if new_miner_id in self._miners:
miner = self._miners[new_miner_id]

View File

@@ -2,6 +2,7 @@
from fastapi import APIRouter
from datetime import datetime
from sqlalchemy import text
router = APIRouter(tags=["health"])
@@ -12,7 +13,7 @@ async def health_check():
return {
"status": "ok",
"service": "pool-hub",
"timestamp": datetime.utcnow().isoformat()
"timestamp": datetime.utcnow().isoformat(),
}
@@ -20,17 +21,14 @@ async def health_check():
async def readiness_check():
    """Readiness probe for Kubernetes.

    Returns:
        A dict with ready=True only when every dependency check passes,
        plus the individual check results and a UTC timestamp.
    """
    # Probe each hard dependency. The rendered source carried both the
    # old multi-line and new one-line form of this dict (diff residue).
    checks = {"database": await check_database(), "redis": await check_redis()}
    all_ready = all(checks.values())
    return {
        "ready": all_ready,
        "checks": checks,
        "timestamp": datetime.utcnow().isoformat(),
    }
@@ -43,7 +41,12 @@ async def liveness_check():
async def check_database() -> bool:
    """Probe database connectivity with a trivial ``SELECT 1``.

    Any failure (import, connect, or query) yields False rather than an
    exception, so the readiness endpoint can report a degraded dependency.
    """
    try:
        # Imported lazily so the probe itself reports import failures.
        from ..database import get_engine
        from sqlalchemy import text

        engine = get_engine()
        async with engine.connect() as connection:
            await connection.execute(text("SELECT 1"))
    except Exception:
        return False
    return True
@@ -52,7 +55,10 @@ async def check_database() -> bool:
async def check_redis() -> bool:
    """Probe Redis connectivity via PING; returns False on any failure."""
    try:
        # Imported lazily so the probe itself reports import failures.
        from ..redis_cache import get_redis_client

        await get_redis_client().ping()
    except Exception:
        return False
    return True

View File

@@ -1,10 +1,19 @@
from __future__ import annotations
import datetime as dt
from typing import Dict, List, Optional
from typing import Dict, List, Optional, Any
from enum import Enum
from sqlalchemy import Boolean, Column, DateTime, Float, ForeignKey, Integer, String, Text
from sqlalchemy import (
Boolean,
Column,
DateTime,
Float,
ForeignKey,
Integer,
String,
Text,
)
from sqlalchemy.dialects.postgresql import JSONB, UUID as PGUUID
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship
from uuid import uuid4
@@ -12,6 +21,7 @@ from uuid import uuid4
class ServiceType(str, Enum):
"""Supported service types"""
WHISPER = "whisper"
STABLE_DIFFUSION = "stable_diffusion"
LLM_INFERENCE = "llm_inference"
@@ -28,7 +38,9 @@ class Miner(Base):
miner_id: Mapped[str] = mapped_column(String(64), primary_key=True)
api_key_hash: Mapped[str] = mapped_column(String(128), nullable=False)
created_at: Mapped[dt.datetime] = mapped_column(DateTime(timezone=True), default=dt.datetime.utcnow)
created_at: Mapped[dt.datetime] = mapped_column(
DateTime(timezone=True), default=dt.datetime.utcnow
)
last_seen_at: Mapped[Optional[dt.datetime]] = mapped_column(DateTime(timezone=True))
addr: Mapped[str] = mapped_column(String(256))
proto: Mapped[str] = mapped_column(String(32))
@@ -43,20 +55,28 @@ class Miner(Base):
trust_score: Mapped[float] = mapped_column(Float, default=0.5)
region: Mapped[Optional[str]] = mapped_column(String(64))
status: Mapped["MinerStatus"] = relationship(back_populates="miner", cascade="all, delete-orphan", uselist=False)
feedback: Mapped[List["Feedback"]] = relationship(back_populates="miner", cascade="all, delete-orphan")
status: Mapped["MinerStatus"] = relationship(
back_populates="miner", cascade="all, delete-orphan", uselist=False
)
feedback: Mapped[List["Feedback"]] = relationship(
back_populates="miner", cascade="all, delete-orphan"
)
class MinerStatus(Base):
    """Live status snapshot for a miner (one row per miner).

    Reconstructed from diff residue: the rendered source carried both the
    old and new definitions of ``miner_id`` and ``updated_at``.
    """

    __tablename__ = "miner_status"

    miner_id: Mapped[str] = mapped_column(
        ForeignKey("miners.miner_id", ondelete="CASCADE"), primary_key=True
    )
    queue_len: Mapped[int] = mapped_column(Integer, default=0)
    busy: Mapped[bool] = mapped_column(Boolean, default=False)
    avg_latency_ms: Mapped[Optional[int]] = mapped_column(Integer)
    temp_c: Mapped[Optional[int]] = mapped_column(Integer)
    mem_free_gb: Mapped[Optional[float]] = mapped_column(Float)
    # Refreshed automatically on every UPDATE via onupdate.
    updated_at: Mapped[dt.datetime] = mapped_column(
        DateTime(timezone=True), default=dt.datetime.utcnow, onupdate=dt.datetime.utcnow
    )

    miner: Mapped[Miner] = relationship(back_populates="status")
@@ -64,28 +84,40 @@ class MinerStatus(Base):
class MatchRequest(Base):
    """A request to match a job against candidate miners.

    Reconstructed from diff residue: the rendered source carried both the
    old and new definitions of ``id``, ``created_at`` and ``results``.
    """

    __tablename__ = "match_requests"

    id: Mapped[PGUUID] = mapped_column(
        PGUUID(as_uuid=True), primary_key=True, default=uuid4
    )
    job_id: Mapped[str] = mapped_column(String(64), nullable=False)
    # Free-form JSON documents; schema is defined by the caller.
    requirements: Mapped[Dict[str, object]] = mapped_column(JSONB, nullable=False)
    hints: Mapped[Dict[str, object]] = mapped_column(JSONB, default=dict)
    top_k: Mapped[int] = mapped_column(Integer, default=1)
    created_at: Mapped[dt.datetime] = mapped_column(
        DateTime(timezone=True), default=dt.datetime.utcnow
    )

    # Child rows are removed with the request (delete-orphan cascade).
    results: Mapped[List["MatchResult"]] = relationship(
        back_populates="request", cascade="all, delete-orphan"
    )
class MatchResult(Base):
    """One scored miner candidate produced for a MatchRequest.

    Reconstructed from diff residue: the rendered source carried both the
    old and new definitions of ``id``, ``request_id`` and ``created_at``.
    """

    __tablename__ = "match_results"

    id: Mapped[PGUUID] = mapped_column(
        PGUUID(as_uuid=True), primary_key=True, default=uuid4
    )
    request_id: Mapped[PGUUID] = mapped_column(
        ForeignKey("match_requests.id", ondelete="CASCADE"), index=True
    )
    miner_id: Mapped[str] = mapped_column(String(64))
    score: Mapped[float] = mapped_column(Float)
    # Human-readable explanation of the score, when provided.
    explain: Mapped[Optional[str]] = mapped_column(Text)
    eta_ms: Mapped[Optional[int]] = mapped_column(Integer)
    price: Mapped[Optional[float]] = mapped_column(Float)
    created_at: Mapped[dt.datetime] = mapped_column(
        DateTime(timezone=True), default=dt.datetime.utcnow
    )

    request: Mapped[MatchRequest] = relationship(back_populates="results")
@@ -93,36 +125,49 @@ class MatchResult(Base):
class Feedback(Base):
    """Outcome feedback recorded for a job attempt on a miner.

    Reconstructed from diff residue: the rendered source carried both the
    old and new definitions of ``id``, ``miner_id`` and ``created_at``.
    """

    __tablename__ = "feedback"

    id: Mapped[PGUUID] = mapped_column(
        PGUUID(as_uuid=True), primary_key=True, default=uuid4
    )
    job_id: Mapped[str] = mapped_column(String(64), nullable=False)
    miner_id: Mapped[str] = mapped_column(
        ForeignKey("miners.miner_id", ondelete="CASCADE"), nullable=False
    )
    outcome: Mapped[str] = mapped_column(String(32), nullable=False)
    latency_ms: Mapped[Optional[int]] = mapped_column(Integer)
    # Machine-readable failure code, when the outcome was a failure.
    fail_code: Mapped[Optional[str]] = mapped_column(String(64))
    tokens_spent: Mapped[Optional[float]] = mapped_column(Float)
    created_at: Mapped[dt.datetime] = mapped_column(
        DateTime(timezone=True), default=dt.datetime.utcnow
    )

    miner: Mapped[Miner] = relationship(back_populates="feedback")
class ServiceConfig(Base):
    """Service configuration for a miner.

    Reconstructed from diff residue: the rendered source carried both the
    old and new definitions of several columns, including an unbalanced
    old ``__table_args__ = (`` fragment.
    """

    __tablename__ = "service_configs"

    id: Mapped[PGUUID] = mapped_column(
        PGUUID(as_uuid=True), primary_key=True, default=uuid4
    )
    miner_id: Mapped[str] = mapped_column(
        ForeignKey("miners.miner_id", ondelete="CASCADE"), nullable=False
    )
    service_type: Mapped[str] = mapped_column(String(32), nullable=False)
    enabled: Mapped[bool] = mapped_column(Boolean, default=False)
    # Free-form JSON; structure depends on the service type.
    config: Mapped[Dict[str, Any]] = mapped_column(JSONB, default=dict)
    pricing: Mapped[Dict[str, Any]] = mapped_column(JSONB, default=dict)
    capabilities: Mapped[List[str]] = mapped_column(JSONB, default=list)
    max_concurrent: Mapped[int] = mapped_column(Integer, default=1)
    created_at: Mapped[dt.datetime] = mapped_column(
        DateTime(timezone=True), default=dt.datetime.utcnow
    )
    updated_at: Mapped[dt.datetime] = mapped_column(
        DateTime(timezone=True), default=dt.datetime.utcnow, onupdate=dt.datetime.utcnow
    )

    # TODO: add a real unique constraint on (miner_id, service_type).
    __table_args__ = ({"schema": None},)

    miner: Mapped[Miner] = relationship(backref="service_configs")