chore: enhance .gitignore and remove obsolete documentation files

- Reorganize .gitignore with categorized sections for better maintainability
- Add comprehensive ignore patterns for Python, Node.js, databases, logs, and build artifacts
- Add project-specific ignore rules for coordinator, explorer, and deployment files
- Remove outdated documentation: BITCOIN-WALLET-SETUP.md, LOCAL_ASSETS_SUMMARY.md, README-CONTAINER-DEPLOYMENT.md, README-DOMAIN-DEPLOYMENT.md
This commit is contained in:
oib
2026-01-24 14:44:51 +01:00
parent 99bf335970
commit 9b9c5beb23
214 changed files with 25558 additions and 171 deletions

View File

@@ -0,0 +1,57 @@
"""Coordinator API configuration with PostgreSQL support"""
from pydantic_settings import BaseSettings
from typing import Optional
class Settings(BaseSettings):
    """Application settings for the Coordinator API (PostgreSQL-backed).

    Every field below is a pydantic-settings field: the assigned value is
    the default, and it can be overridden by an environment variable of the
    same name or by the ``.env`` file declared in ``Config``.
    """
    # API Configuration
    api_host: str = "0.0.0.0"  # bind on all interfaces
    api_port: int = 8000
    api_prefix: str = "/v1"
    debug: bool = False
    # Database Configuration
    # NOTE(review): default embeds dev credentials; production deployments
    # must override DATABASE_URL via the environment.
    database_url: str = "postgresql://aitbc_user:aitbc_password@localhost:5432/aitbc_coordinator"
    # JWT Configuration
    # SECURITY: placeholder secret — must be overridden in production.
    jwt_secret: str = "your-secret-key-change-in-production"
    jwt_algorithm: str = "HS256"
    jwt_expiration_hours: int = 24
    # Job Configuration
    default_job_ttl_seconds: int = 3600  # 1 hour
    max_job_ttl_seconds: int = 86400  # 24 hours
    job_cleanup_interval_seconds: int = 300  # 5 minutes
    # Miner Configuration
    miner_heartbeat_timeout_seconds: int = 120  # 2 minutes
    miner_max_inflight: int = 10
    # Marketplace Configuration
    marketplace_offer_ttl_seconds: int = 3600  # 1 hour
    # Wallet Configuration
    wallet_rpc_url: str = "http://localhost:9080"
    # CORS Configuration — origins the browser is allowed to call us from
    cors_origins: list[str] = [
        "http://localhost:3000",
        "http://localhost:8080",
        "https://aitbc.bubuit.net",
        "https://aitbc.bubuit.net:8080"
    ]
    # Logging Configuration
    log_level: str = "INFO"
    log_format: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"

    class Config:
        # Load overrides from a local .env file when present.
        env_file = ".env"
        env_file_encoding = "utf-8"


# Create global settings instance (shared, constructed once at import time).
settings = Settings()

View File

@@ -15,6 +15,7 @@ from .routers import (
services,
marketplace_offers,
zk_applications,
explorer,
)
from .routers import zk_applications
from .routers.governance import router as governance
@@ -51,6 +52,7 @@ def create_app() -> FastAPI:
app.include_router(zk_applications.router, prefix="/v1")
app.include_router(governance, prefix="/v1")
app.include_router(partners, prefix="/v1")
app.include_router(explorer, prefix="/v1")
# Add Prometheus metrics endpoint
metrics_app = make_asgi_app()

View File

@@ -1,4 +1,5 @@
from fastapi import APIRouter, Depends, HTTPException, status
from sqlmodel import select
from ..deps import require_admin_key
from ..services import JobService, MinerService
@@ -53,7 +54,7 @@ async def list_miners(session: SessionDep, admin_key: str = Depends(require_admi
miner_service = MinerService(session)
miners = [
{
"miner_id": record.miner_id,
"miner_id": record.id,
"status": record.status,
"inflight": record.inflight,
"concurrency": record.concurrency,

View File

@@ -2,6 +2,7 @@ from fastapi import APIRouter, Depends, HTTPException, status
from ..deps import require_client_key
from ..schemas import JobCreate, JobView, JobResult
from ..types import JobState
from ..services import JobService
from ..storage import SessionDep

View File

@@ -73,7 +73,7 @@ async def submit_result(
duration_ms = int((datetime.utcnow() - job.requested_at).total_seconds() * 1000)
metrics["duration_ms"] = duration_ms
receipt = receipt_service.create_receipt(job, miner_id, req.result, metrics)
receipt = await receipt_service.create_receipt(job, miner_id, req.result, metrics)
job.receipt = receipt
job.receipt_id = receipt["receipt_id"] if receipt else None
session.add(job)

View File

@@ -20,9 +20,9 @@ class PartnerRegister(BaseModel):
"""Register a new partner application"""
name: str = Field(..., min_length=3, max_length=100)
description: str = Field(..., min_length=10, max_length=500)
website: str = Field(..., regex=r'^https?://')
contact: str = Field(..., regex=r'^[^@]+@[^@]+\.[^@]+$')
integration_type: str = Field(..., regex="^(explorer|analytics|wallet|exchange|other)$")
website: str = Field(..., pattern=r'^https?://')
contact: str = Field(..., pattern=r'^[^@]+@[^@]+\.[^@]+$')
integration_type: str = Field(..., pattern="^(explorer|analytics|wallet|exchange|other)$")
class PartnerResponse(BaseModel):
@@ -36,7 +36,7 @@ class PartnerResponse(BaseModel):
class WebhookCreate(BaseModel):
"""Create a webhook subscription"""
url: str = Field(..., regex=r'^https?://')
url: str = Field(..., pattern=r'^https?://')
events: List[str] = Field(..., min_items=1)
secret: Optional[str] = Field(max_length=100)

View File

@@ -195,6 +195,7 @@ class ReceiptSummary(BaseModel):
model_config = ConfigDict(populate_by_name=True)
receiptId: str
jobId: Optional[str] = None
miner: str
coordinator: str
issuedAt: datetime

View File

@@ -50,7 +50,7 @@ class ExplorerService:
height=height,
hash=job.id,
timestamp=job.requested_at,
tx_count=1,
txCount=1,
proposer=proposer,
)
)
@@ -71,13 +71,22 @@ class ExplorerService:
for index, job in enumerate(jobs):
height = _DEFAULT_HEIGHT_BASE + offset + index
status_label = _STATUS_LABELS.get(job.state, job.state.value.title())
value = job.payload.get("value") if isinstance(job.payload, dict) else None
if value is None:
value_str = "0"
elif isinstance(value, (int, float)):
value_str = f"{value}"
else:
value_str = str(value)
# Try to get payment amount from receipt
value_str = "0"
if job.receipt and isinstance(job.receipt, dict):
price = job.receipt.get("price")
if price is not None:
value_str = f"{price}"
# Fallback to payload value if no receipt
if value_str == "0":
value = job.payload.get("value") if isinstance(job.payload, dict) else None
if value is not None:
if isinstance(value, (int, float)):
value_str = f"{value}"
else:
value_str = str(value)
items.append(
TransactionSummary(
@@ -100,14 +109,16 @@ class ExplorerService:
address_map: dict[str, dict[str, object]] = defaultdict(
lambda: {
"address": "",
"balance": "0",
"balance": 0.0,
"tx_count": 0,
"last_active": datetime.min,
"recent_transactions": deque(maxlen=5),
"earned": 0.0,
"spent": 0.0,
}
)
def touch(address: Optional[str], tx_id: str, when: datetime, value_hint: Optional[str] = None) -> None:
def touch(address: Optional[str], tx_id: str, when: datetime, earned: float = 0.0, spent: float = 0.0) -> None:
if not address:
return
entry = address_map[address]
@@ -115,18 +126,27 @@ class ExplorerService:
entry["tx_count"] = int(entry["tx_count"]) + 1
if when > entry["last_active"]:
entry["last_active"] = when
if value_hint:
entry["balance"] = value_hint
# Track earnings and spending
entry["earned"] = float(entry["earned"]) + earned
entry["spent"] = float(entry["spent"]) + spent
entry["balance"] = float(entry["earned"]) - float(entry["spent"])
recent: deque[str] = entry["recent_transactions"] # type: ignore[assignment]
recent.appendleft(tx_id)
for job in jobs:
value = job.payload.get("value") if isinstance(job.payload, dict) else None
value_hint: Optional[str] = None
if value is not None:
value_hint = str(value)
touch(job.client_id, job.id, job.requested_at, value_hint=value_hint)
touch(job.assigned_miner_id, job.id, job.requested_at)
# Get payment amount from receipt if available
price = 0.0
if job.receipt and isinstance(job.receipt, dict):
receipt_price = job.receipt.get("price")
if receipt_price is not None:
try:
price = float(receipt_price)
except (TypeError, ValueError):
pass
# Miner earns, client spends
touch(job.assigned_miner_id, job.id, job.requested_at, earned=price)
touch(job.client_id, job.id, job.requested_at, spent=price)
sorted_addresses = sorted(
address_map.values(),
@@ -138,7 +158,7 @@ class ExplorerService:
items = [
AddressSummary(
address=entry["address"],
balance=str(entry["balance"]),
balance=f"{float(entry['balance']):.6f}",
txCount=int(entry["tx_count"]),
lastActive=entry["last_active"],
recentTransactions=list(entry["recent_transactions"]),
@@ -164,19 +184,24 @@ class ExplorerService:
items: list[ReceiptSummary] = []
for row in rows:
payload = row.payload or {}
miner = payload.get("miner") or payload.get("miner_id") or "unknown"
coordinator = payload.get("coordinator") or payload.get("coordinator_id") or "unknown"
# Extract miner from provider field (receipt format) or fallback
miner = payload.get("provider") or payload.get("miner") or payload.get("miner_id") or "unknown"
# Extract client as coordinator (receipt format) or fallback
coordinator = payload.get("client") or payload.get("coordinator") or payload.get("coordinator_id") or "unknown"
status = payload.get("status") or payload.get("state") or "Unknown"
# Get job_id from payload
job_id_from_payload = payload.get("job_id") or row.job_id
items.append(
ReceiptSummary(
receipt_id=row.receipt_id,
receiptId=row.receipt_id,
miner=miner,
coordinator=coordinator,
issued_at=row.created_at,
issuedAt=row.created_at,
status=status,
payload=payload,
jobId=job_id_from_payload,
)
)
resolved_job_id = job_id or "all"
return ReceiptListResponse(job_id=resolved_job_id, items=items)
return ReceiptListResponse(jobId=resolved_job_id, items=items)

View File

@@ -101,7 +101,7 @@ class JobService:
return None
def _ensure_not_expired(self, job: Job) -> Job:
if job.state == JobState.queued and job.expires_at <= datetime.utcnow():
if job.state in {JobState.queued, JobState.running} and job.expires_at <= datetime.utcnow():
job.state = JobState.expired
job.error = "job expired"
self.session.add(job)

View File

@@ -32,6 +32,7 @@ class MinerService:
miner.concurrency = payload.concurrency
miner.region = payload.region
miner.session_token = session_token
miner.inflight = 0
miner.last_heartbeat = datetime.utcnow()
miner.status = "ONLINE"
self.session.commit()

View File

@@ -35,24 +35,60 @@ class ReceiptService:
) -> Dict[str, Any] | None:
if self._signer is None:
return None
metrics = result_metrics or {}
result_payload = job_result or {}
unit_type = _first_present([
metrics.get("unit_type"),
result_payload.get("unit_type"),
], default="gpu_seconds")
units = _coerce_float(_first_present([
metrics.get("units"),
result_payload.get("units"),
]))
if units is None:
duration_ms = _coerce_float(metrics.get("duration_ms"))
if duration_ms is not None:
units = duration_ms / 1000.0
else:
duration_seconds = _coerce_float(_first_present([
metrics.get("duration_seconds"),
metrics.get("compute_time"),
result_payload.get("execution_time"),
result_payload.get("duration"),
]))
units = duration_seconds
if units is None:
units = 0.0
unit_price = _coerce_float(_first_present([
metrics.get("unit_price"),
result_payload.get("unit_price"),
]))
if unit_price is None:
unit_price = 0.02
price = _coerce_float(_first_present([
metrics.get("price"),
result_payload.get("price"),
metrics.get("aitbc_earned"),
result_payload.get("aitbc_earned"),
metrics.get("cost"),
result_payload.get("cost"),
]))
if price is None:
price = round(units * unit_price, 6)
payload = {
"version": "1.0",
"receipt_id": token_hex(16),
"job_id": job.id,
"provider": miner_id,
"client": job.client_id,
"units": _first_present([
(result_metrics or {}).get("units"),
(job_result or {}).get("units"),
], default=0.0),
"unit_type": _first_present([
(result_metrics or {}).get("unit_type"),
(job_result or {}).get("unit_type"),
], default="gpu_seconds"),
"price": _first_present([
(result_metrics or {}).get("price"),
(job_result or {}).get("price"),
]),
"status": job.state.value,
"units": units,
"unit_type": unit_type,
"unit_price": unit_price,
"price": price,
"started_at": int(job.requested_at.timestamp()) if job.requested_at else int(datetime.utcnow().timestamp()),
"completed_at": int(datetime.utcnow().timestamp()),
"metadata": {
@@ -105,3 +141,13 @@ def _first_present(values: list[Optional[Any]], default: Optional[Any] = None) -
if value is not None:
return value
return default
def _coerce_float(value: Any) -> Optional[float]:
"""Coerce a value to float, returning None if not possible"""
if value is None:
return None
try:
return float(value)
except (TypeError, ValueError):
return None

View File

@@ -0,0 +1,223 @@
"""PostgreSQL database module for Coordinator API"""
import json
from datetime import datetime
from decimal import Decimal
from typing import Any, Dict, Generator, List, Optional
from urllib.parse import urlsplit

import psycopg2
from psycopg2.extras import RealDictCursor
from sqlalchemy import MetaData, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, sessionmaker
from sqlalchemy.pool import StaticPool

from .config_pg import settings
# SQLAlchemy setup for complex queries.
# pool_pre_ping validates a connection before handing it out;
# pool_recycle=300 drops connections older than 5 minutes.
engine = create_engine(
    settings.database_url,
    echo=settings.debug,  # log emitted SQL when debug is enabled
    pool_pre_ping=True,
    pool_recycle=300,
)
# Session factory bound to the engine; get_db() yields one session per use.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# Declarative base for ORM models defined against this engine.
Base = declarative_base()
# Direct PostgreSQL connection for performance
def get_pg_connection():
    """Return a direct psycopg2 connection with dict-style rows.

    Fix: connection parameters are now derived from ``settings.database_url``
    so this stays consistent with the SQLAlchemy engine above, instead of
    hardcoding a second copy of the credentials that could silently drift.
    The previous literal values are kept as fallbacks for a partially
    specified URL, so default behavior is unchanged.
    """
    parts = urlsplit(settings.database_url)
    return psycopg2.connect(
        host=parts.hostname or "localhost",
        database=(parts.path or "").lstrip("/") or "aitbc_coordinator",
        user=parts.username or "aitbc_user",
        password=parts.password or "aitbc_password",
        port=parts.port or 5432,
        cursor_factory=RealDictCursor,
    )
def get_db() -> Generator[Session, None, None]:
    """Dependency generator: yield a SQLAlchemy session, always closing it."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
class PostgreSQLAdapter:
    """PostgreSQL adapter for high-performance operations.

    Wraps one long-lived psycopg2 connection (dict rows via RealDictCursor)
    for hot-path queries that bypass the ORM.
    NOTE(review): a single shared connection is not safe for concurrent use
    across threads — confirm each worker gets its own adapter.
    """

    def __init__(self):
        # One connection per adapter instance; released via close().
        self.connection = get_pg_connection()

    def execute_query(self, query: str, params: tuple = None) -> List[Dict[str, Any]]:
        """Execute a read query and return all rows as dicts."""
        with self.connection.cursor() as cursor:
            cursor.execute(query, params)
            return cursor.fetchall()

    def execute_update(self, query: str, params: tuple = None) -> int:
        """Execute an INSERT/UPDATE/DELETE, commit, and return the row count."""
        with self.connection.cursor() as cursor:
            cursor.execute(query, params)
            self.connection.commit()
            return cursor.rowcount

    def execute_batch(self, query: str, params_list: List[tuple]) -> int:
        """Run one statement for many parameter tuples in a single commit."""
        with self.connection.cursor() as cursor:
            cursor.executemany(query, params_list)
            self.connection.commit()
            return cursor.rowcount

    def get_job_by_id(self, job_id: str) -> Optional[Dict[str, Any]]:
        """Return the job row with the given id, or None if absent."""
        results = self.execute_query("SELECT * FROM job WHERE id = %s", (job_id,))
        return results[0] if results else None

    def get_available_miners(self, region: Optional[str] = None) -> List[Dict[str, Any]]:
        """Return miners with spare capacity, optionally filtered by region.

        NOTE(review): filters on status = 'active', while other code in this
        project sets miner.status to 'ONLINE' — confirm which literal the
        schema actually stores.
        """
        if region:
            query = """
                SELECT * FROM miner
                WHERE status = 'active'
                  AND inflight < concurrency
                  AND (region = %s OR region IS NULL)
                ORDER BY last_heartbeat DESC
            """
            return self.execute_query(query, (region,))
        query = """
            SELECT * FROM miner
            WHERE status = 'active'
              AND inflight < concurrency
            ORDER BY last_heartbeat DESC
        """
        return self.execute_query(query)

    def get_pending_jobs(self, limit: int = 100) -> List[Dict[str, Any]]:
        """Return up to *limit* unexpired pending jobs, oldest first."""
        query = """
            SELECT * FROM job
            WHERE state = 'pending'
              AND expires_at > NOW()
            ORDER BY requested_at ASC
            LIMIT %s
        """
        return self.execute_query(query, (limit,))

    def update_job_state(self, job_id: str, state: str, **kwargs) -> bool:
        """Set a job's state plus any extra columns; True if a row changed.

        SECURITY: column names come from **kwargs and are interpolated into
        the SQL text (values themselves are parameterized). Callers must
        only pass trusted, hardcoded keyword names.
        """
        set_clauses = ["state = %s"]
        # Build params in clause order: state, extra values, then the id.
        # (Clearer than the previous insert(-1) juggling; same final order.)
        params: list = [state]
        for key, value in kwargs.items():
            set_clauses.append(f"{key} = %s")
            params.append(value)
        params.append(job_id)
        query = f"""
            UPDATE job
            SET {', '.join(set_clauses)}, updated_at = NOW()
            WHERE id = %s
        """
        return self.execute_update(query, params) > 0

    def get_marketplace_offers(self, status: str = "active") -> List[Dict[str, Any]]:
        """Return marketplace offers with the given status, cheapest first."""
        query = """
            SELECT * FROM marketplaceoffer
            WHERE status = %s
            ORDER BY price ASC, created_at DESC
        """
        return self.execute_query(query, (status,))

    def get_user_wallets(self, user_id: str) -> List[Dict[str, Any]]:
        """Return all wallets owned by *user_id*, newest first."""
        query = """
            SELECT * FROM wallet
            WHERE user_id = %s
            ORDER BY created_at DESC
        """
        return self.execute_query(query, (user_id,))

    def create_job(self, job_data: Dict[str, Any]) -> str:
        """Insert a new job row and return its id.

        Bug fix: previously routed the INSERT through execute_query, which
        never commits — the row was silently rolled back when the connection
        closed. Now fetches the RETURNING value and commits explicitly.
        """
        query = """
            INSERT INTO job (id, client_id, state, payload, constraints,
                             ttl_seconds, requested_at, expires_at)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
            RETURNING id
        """
        params = (
            job_data['id'],
            job_data['client_id'],
            job_data['state'],
            json.dumps(job_data['payload']),
            json.dumps(job_data.get('constraints', {})),
            job_data['ttl_seconds'],
            job_data['requested_at'],
            job_data['expires_at'],
        )
        with self.connection.cursor() as cursor:
            cursor.execute(query, params)
            row = cursor.fetchone()
            self.connection.commit()
        return row['id']

    def cleanup_expired_jobs(self) -> int:
        """Mark expired pending jobs as 'expired'; return how many changed."""
        query = """
            UPDATE job
            SET state = 'expired', updated_at = NOW()
            WHERE state = 'pending'
              AND expires_at < NOW()
        """
        return self.execute_update(query)

    def get_miner_stats(self, miner_id: str) -> Optional[Dict[str, Any]]:
        """Aggregate job counts and average completion time for one miner."""
        query = """
            SELECT
                COUNT(*) as total_jobs,
                COUNT(CASE WHEN state = 'completed' THEN 1 END) as completed_jobs,
                COUNT(CASE WHEN state = 'failed' THEN 1 END) as failed_jobs,
                AVG(CASE WHEN state = 'completed' THEN EXTRACT(EPOCH FROM (updated_at - requested_at)) END) as avg_duration_seconds
            FROM job
            WHERE assigned_miner_id = %s
        """
        results = self.execute_query(query, (miner_id,))
        return results[0] if results else None

    def close(self):
        """Close the underlying connection."""
        if self.connection:
            self.connection.close()
# Global adapter instance
# NOTE(review): this opens a PostgreSQL connection at import time, so merely
# importing the module requires a reachable database — confirm intended.
db_adapter = PostgreSQLAdapter()
# Database initialization
def init_db():
    """Create all ORM tables registered on the models' declarative Base."""
    # Import models here to avoid circular imports; note this local ``Base``
    # (from .models) intentionally shadows the module-level ``Base`` above.
    from .models import Base
    # Create all tables
    Base.metadata.create_all(bind=engine)
    print("✅ PostgreSQL database initialized successfully!")
# Health check
def check_db_health() -> Dict[str, Any]:
    """Probe the database with a trivial query and report health as a dict."""
    timestamp = datetime.utcnow().isoformat()
    try:
        db_adapter.execute_query("SELECT 1 as health_check")
    except Exception as exc:  # boundary: report the failure, never raise
        return {
            "status": "unhealthy",
            "error": str(exc),
            "timestamp": timestamp,
        }
    return {
        "status": "healthy",
        "database": "postgresql",
        "timestamp": timestamp,
    }