chore: enhance .gitignore and remove obsolete documentation files

- Reorganize .gitignore with categorized sections for better maintainability
- Add comprehensive ignore patterns for Python, Node.js, databases, logs, and build artifacts
- Add project-specific ignore rules for coordinator, explorer, and deployment files
- Remove outdated documentation: BITCOIN-WALLET-SETUP.md, LOCAL_ASSETS_SUMMARY.md, README-CONTAINER-DEPLOYMENT.md, README-DOMAIN-DEPLOYMENT.md
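The reorganized .gitignore itself is not among the hunks shown below, so here is a rough sketch of the categorized layout the message describes (section names and most patterns are illustrative, not the actual file contents; coordinator.db is the SQLite database referenced by the migration scripts):

```gitignore
# --- Python ---
__pycache__/
*.py[cod]
.venv/

# --- Node.js ---
node_modules/

# --- Databases ---
*.db
*.sqlite3

# --- Logs ---
*.log

# --- Build artifacts ---
dist/
build/

# --- Project-specific: coordinator / explorer / deployment ---
coordinator.db
```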
Author: oib
Date: 2026-01-24 14:44:51 +01:00
Parent: 99bf335970
Commit: 9b9c5beb23
214 changed files with 25558 additions and 171 deletions

View File

@@ -0,0 +1,103 @@
#!/usr/bin/env python3
"""Complete migration script for Coordinator API"""
import sqlite3
import psycopg2
from psycopg2.extras import Json  # wraps parsed JSON so psycopg2 can adapt dicts to jsonb
import json
from decimal import Decimal
# Database configurations
SQLITE_DB = "coordinator.db"
PG_CONFIG = {
"host": "localhost",
"database": "aitbc_coordinator",
"user": "aitbc_user",
"password": "aitbc_password",
"port": 5432
}
def migrate_all_data():
"""Migrate all data from SQLite to PostgreSQL"""
print("\nStarting complete data migration...")
# Connect to SQLite
sqlite_conn = sqlite3.connect(SQLITE_DB)
sqlite_conn.row_factory = sqlite3.Row
sqlite_cursor = sqlite_conn.cursor()
# Connect to PostgreSQL
pg_conn = psycopg2.connect(**PG_CONFIG)
pg_cursor = pg_conn.cursor()
# Get all tables
sqlite_cursor.execute("SELECT name FROM sqlite_master WHERE type='table'")
tables = [row[0] for row in sqlite_cursor.fetchall()]
for table_name in tables:
if table_name == 'sqlite_sequence':
continue
print(f"\nMigrating {table_name}...")
# Get table schema
sqlite_cursor.execute(f"PRAGMA table_info({table_name})")
columns = sqlite_cursor.fetchall()
column_names = [col[1] for col in columns]
# Get data
sqlite_cursor.execute(f"SELECT * FROM {table_name}")
rows = sqlite_cursor.fetchall()
if not rows:
print(f" No data in {table_name}")
continue
# Build insert query
if table_name == 'user':
insert_sql = f'''
INSERT INTO "{table_name}" ({', '.join(column_names)})
VALUES ({', '.join(['%s'] * len(column_names))})
'''
else:
insert_sql = f'''
INSERT INTO {table_name} ({', '.join(column_names)})
VALUES ({', '.join(['%s'] * len(column_names))})
'''
# Insert data
count = 0
for row in rows:
values = []
for i, value in enumerate(row):
col_name = column_names[i]
# Handle special cases
if col_name in ['payload', 'constraints', 'result', 'receipt', 'capabilities',
'extra_metadata', 'sla', 'attributes', 'metadata'] and value:
                    if isinstance(value, str):
                        try:
                            # psycopg2 cannot adapt a plain dict, so wrap parsed JSON for jsonb columns
                            value = Json(json.loads(value))
                        except (json.JSONDecodeError, TypeError):
                            pass  # leave as text; PostgreSQL casts valid JSON strings to jsonb
elif col_name in ['balance', 'price', 'average_job_duration_ms'] and value is not None:
value = Decimal(str(value))
values.append(value)
            try:
                # Guard each row with a savepoint; otherwise the first failed insert
                # aborts the transaction and every later insert fails too
                pg_cursor.execute("SAVEPOINT migrate_row")
                pg_cursor.execute(insert_sql, values)
                pg_cursor.execute("RELEASE SAVEPOINT migrate_row")
                count += 1
            except Exception as e:
                pg_cursor.execute("ROLLBACK TO SAVEPOINT migrate_row")
                print(f"  Error inserting row: {e}")
                print(f"  Values: {values}")
print(f" Migrated {count} rows from {table_name}")
pg_conn.commit()
sqlite_conn.close()
pg_conn.close()
print("\n✅ Complete migration finished!")
if __name__ == "__main__":
migrate_all_data()
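A quick integrity check after running the script is to compare per-table row counts between the two databases. A minimal sketch, reusing the connection settings above (quoting the PostgreSQL table name mirrors the script's special case for user):

```python
import sqlite3
import psycopg2

sqlite_conn = sqlite3.connect("coordinator.db")
pg_conn = psycopg2.connect(host="localhost", database="aitbc_coordinator",
                           user="aitbc_user", password="aitbc_password", port=5432)

tables = sqlite_conn.execute("SELECT name FROM sqlite_master WHERE type='table'")
for (table,) in tables.fetchall():
    if table == "sqlite_sequence":
        continue
    src = sqlite_conn.execute(f"SELECT COUNT(*) FROM {table}").fetchone()[0]
    with pg_conn.cursor() as cur:
        # double quotes keep reserved names like "user" valid in PostgreSQL
        cur.execute(f'SELECT COUNT(*) FROM "{table}"')
        dst = cur.fetchone()[0]
    print(f"{table}: sqlite={src} postgres={dst} {'OK' if src == dst else 'MISMATCH'}")
```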

View File

@@ -0,0 +1,318 @@
#!/usr/bin/env python3
"""Migration script for Coordinator API from SQLite to PostgreSQL"""
import os
import sys
from pathlib import Path
# Add the src directory to the path
sys.path.insert(0, str(Path(__file__).parent / "src"))
import sqlite3
import psycopg2
from psycopg2.extras import Json  # adapts Python dicts to jsonb parameters
from datetime import datetime
from decimal import Decimal
import json
# Database configurations
SQLITE_DB = "coordinator.db"
PG_CONFIG = {
"host": "localhost",
"database": "aitbc_coordinator",
"user": "aitbc_user",
"password": "aitbc_password",
"port": 5432
}
def create_pg_schema():
"""Create PostgreSQL schema with optimized types"""
conn = psycopg2.connect(**PG_CONFIG)
cursor = conn.cursor()
print("Creating PostgreSQL schema...")
# Drop existing tables
cursor.execute("DROP TABLE IF EXISTS jobreceipt CASCADE")
cursor.execute("DROP TABLE IF EXISTS marketplacebid CASCADE")
cursor.execute("DROP TABLE IF EXISTS marketplaceoffer CASCADE")
cursor.execute("DROP TABLE IF EXISTS job CASCADE")
cursor.execute("DROP TABLE IF EXISTS usersession CASCADE")
cursor.execute("DROP TABLE IF EXISTS wallet CASCADE")
cursor.execute("DROP TABLE IF EXISTS miner CASCADE")
cursor.execute("DROP TABLE IF EXISTS transaction CASCADE")
cursor.execute("DROP TABLE IF EXISTS user CASCADE")
# Create user table
cursor.execute("""
        CREATE TABLE "user" (  -- quoted: USER is reserved in PostgreSQL
id VARCHAR(255) PRIMARY KEY,
email VARCHAR(255),
username VARCHAR(255),
status VARCHAR(20) CHECK (status IN ('active', 'inactive', 'suspended')),
created_at TIMESTAMP WITH TIME ZONE,
updated_at TIMESTAMP WITH TIME ZONE,
last_login TIMESTAMP WITH TIME ZONE
)
""")
# Create wallet table
cursor.execute("""
CREATE TABLE wallet (
id SERIAL PRIMARY KEY,
            user_id VARCHAR(255) REFERENCES "user"(id),
address VARCHAR(255) UNIQUE,
balance NUMERIC(20, 8) DEFAULT 0,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
)
""")
# Create usersession table
cursor.execute("""
CREATE TABLE usersession (
id SERIAL PRIMARY KEY,
            user_id VARCHAR(255) REFERENCES "user"(id),
token VARCHAR(255) UNIQUE,
expires_at TIMESTAMP WITH TIME ZONE,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
last_used TIMESTAMP WITH TIME ZONE DEFAULT NOW()
)
""")
# Create miner table
cursor.execute("""
CREATE TABLE miner (
id VARCHAR(255) PRIMARY KEY,
region VARCHAR(100),
capabilities JSONB,
concurrency INTEGER DEFAULT 1,
status VARCHAR(20) DEFAULT 'active',
inflight INTEGER DEFAULT 0,
extra_metadata JSONB,
last_heartbeat TIMESTAMP WITH TIME ZONE,
session_token VARCHAR(255),
last_job_at TIMESTAMP WITH TIME ZONE,
jobs_completed INTEGER DEFAULT 0,
jobs_failed INTEGER DEFAULT 0,
total_job_duration_ms BIGINT DEFAULT 0,
average_job_duration_ms NUMERIC(10, 2) DEFAULT 0,
last_receipt_id VARCHAR(255)
)
""")
# Create job table
cursor.execute("""
CREATE TABLE job (
id VARCHAR(255) PRIMARY KEY,
client_id VARCHAR(255),
state VARCHAR(20) CHECK (state IN ('pending', 'assigned', 'running', 'completed', 'failed', 'expired')),
payload JSONB,
constraints JSONB,
ttl_seconds INTEGER,
requested_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
expires_at TIMESTAMP WITH TIME ZONE,
assigned_miner_id VARCHAR(255) REFERENCES miner(id),
result JSONB,
receipt JSONB,
            receipt_id VARCHAR(255),
            updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), -- written by the adapter's state updates
            error TEXT
)
""")
# Create marketplaceoffer table
cursor.execute("""
CREATE TABLE marketplaceoffer (
id VARCHAR(255) PRIMARY KEY,
provider VARCHAR(255),
capacity INTEGER,
price NUMERIC(20, 8),
sla JSONB,
status VARCHAR(20) CHECK (status IN ('active', 'inactive', 'filled', 'expired')),
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
attributes JSONB
)
""")
# Create marketplacebid table
cursor.execute("""
CREATE TABLE marketplacebid (
id VARCHAR(255) PRIMARY KEY,
provider VARCHAR(255),
capacity INTEGER,
price NUMERIC(20, 8),
notes TEXT,
status VARCHAR(20) DEFAULT 'pending',
submitted_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
)
""")
# Create jobreceipt table
cursor.execute("""
CREATE TABLE jobreceipt (
id VARCHAR(255) PRIMARY KEY,
job_id VARCHAR(255) REFERENCES job(id),
receipt_id VARCHAR(255),
payload JSONB,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
)
""")
# Create transaction table
cursor.execute("""
CREATE TABLE transaction (
id VARCHAR(255) PRIMARY KEY,
user_id VARCHAR(255),
type VARCHAR(50),
amount NUMERIC(20, 8),
status VARCHAR(20),
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
metadata JSONB
)
""")
# Create indexes for performance
print("Creating indexes...")
cursor.execute("CREATE INDEX idx_job_state ON job(state)")
cursor.execute("CREATE INDEX idx_job_client ON job(client_id)")
cursor.execute("CREATE INDEX idx_job_expires ON job(expires_at)")
cursor.execute("CREATE INDEX idx_miner_status ON miner(status)")
cursor.execute("CREATE INDEX idx_miner_heartbeat ON miner(last_heartbeat)")
cursor.execute("CREATE INDEX idx_wallet_user ON wallet(user_id)")
cursor.execute("CREATE INDEX idx_usersession_token ON usersession(token)")
cursor.execute("CREATE INDEX idx_usersession_expires ON usersession(expires_at)")
cursor.execute("CREATE INDEX idx_marketplaceoffer_status ON marketplaceoffer(status)")
cursor.execute("CREATE INDEX idx_marketplaceoffer_provider ON marketplaceoffer(provider)")
cursor.execute("CREATE INDEX idx_marketplacebid_provider ON marketplacebid(provider)")
conn.commit()
conn.close()
print("✅ PostgreSQL schema created successfully!")
def migrate_data():
"""Migrate data from SQLite to PostgreSQL"""
print("\nStarting data migration...")
# Connect to SQLite
sqlite_conn = sqlite3.connect(SQLITE_DB)
sqlite_conn.row_factory = sqlite3.Row
sqlite_cursor = sqlite_conn.cursor()
# Connect to PostgreSQL
pg_conn = psycopg2.connect(**PG_CONFIG)
pg_cursor = pg_conn.cursor()
# Migration order respecting foreign keys
migrations = [
('user', '''
INSERT INTO "user" (id, email, username, status, created_at, updated_at, last_login)
VALUES (%s, %s, %s, %s, %s, %s, %s)
'''),
('wallet', '''
INSERT INTO wallet (id, user_id, address, balance, created_at, updated_at)
VALUES (%s, %s, %s, %s, %s, %s)
'''),
('miner', '''
INSERT INTO miner (id, region, capabilities, concurrency, status, inflight,
extra_metadata, last_heartbeat, session_token, last_job_at,
jobs_completed, jobs_failed, total_job_duration_ms,
average_job_duration_ms, last_receipt_id)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
'''),
('job', '''
INSERT INTO job (id, client_id, state, payload, constraints, ttl_seconds,
requested_at, expires_at, assigned_miner_id, result, receipt,
receipt_id, error)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
'''),
('marketplaceoffer', '''
INSERT INTO marketplaceoffer (id, provider, capacity, price, sla, status,
created_at, attributes)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
'''),
('marketplacebid', '''
INSERT INTO marketplacebid (id, provider, capacity, price, notes, status,
submitted_at)
VALUES (%s, %s, %s, %s, %s, %s, %s)
'''),
('jobreceipt', '''
INSERT INTO jobreceipt (id, job_id, receipt_id, payload, created_at)
VALUES (%s, %s, %s, %s, %s)
'''),
('usersession', '''
INSERT INTO usersession (id, user_id, token, expires_at, created_at, last_used)
VALUES (%s, %s, %s, %s, %s, %s)
'''),
('transaction', '''
INSERT INTO transaction (id, user_id, type, amount, status, created_at, metadata)
VALUES (%s, %s, %s, %s, %s, %s, %s)
''')
]
for table_name, insert_sql in migrations:
print(f"Migrating {table_name}...")
sqlite_cursor.execute(f"SELECT * FROM {table_name}")
rows = sqlite_cursor.fetchall()
count = 0
for row in rows:
# Convert row to dict and handle JSON fields
values = []
for key in row.keys():
value = row[key]
if key in ['payload', 'constraints', 'result', 'receipt', 'capabilities',
'extra_metadata', 'sla', 'attributes', 'metadata']:
# Handle JSON fields
                    if isinstance(value, str):
                        try:
                            # Wrap parsed JSON so psycopg2 can adapt it to jsonb
                            value = Json(json.loads(value))
                        except (json.JSONDecodeError, TypeError):
                            pass  # leave as text; valid JSON strings cast to jsonb
elif key in ['balance', 'price', 'average_job_duration_ms']:
# Handle numeric fields
if value is not None:
value = Decimal(str(value))
values.append(value)
pg_cursor.execute(insert_sql, values)
count += 1
print(f" - Migrated {count} {table_name} records")
pg_conn.commit()
print(f"\n✅ Migration complete!")
sqlite_conn.close()
pg_conn.close()
def main():
"""Main migration process"""
print("=" * 60)
print("AITBC Coordinator API SQLite to PostgreSQL Migration")
print("=" * 60)
# Check if SQLite DB exists
if not Path(SQLITE_DB).exists():
print(f"❌ SQLite database '{SQLITE_DB}' not found!")
return
# Create PostgreSQL schema
create_pg_schema()
# Migrate data
migrate_data()
print("\n" + "=" * 60)
print("Migration completed successfully!")
print("=" * 60)
print("\nNext steps:")
print("1. Update coordinator-api configuration")
print("2. Install PostgreSQL dependencies")
print("3. Restart the coordinator service")
print("4. Verify data integrity")
if __name__ == "__main__":
main()
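One step the script does not cover: wallet and usersession use SERIAL primary keys, and copying rows with explicit ids leaves their sequences at 1, so the next application insert would collide. A hedged sketch of realigning them after migration:

```python
import psycopg2

conn = psycopg2.connect(host="localhost", database="aitbc_coordinator",
                        user="aitbc_user", password="aitbc_password", port=5432)
with conn.cursor() as cur:
    for table in ("wallet", "usersession"):
        # Move the sequence past the highest migrated id (1 if the table is empty)
        cur.execute(
            f"SELECT setval(pg_get_serial_sequence('{table}', 'id'), "
            f"COALESCE((SELECT MAX(id) FROM {table}), 1))"
        )
conn.commit()
conn.close()
```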

View File

@@ -0,0 +1,29 @@
#!/bin/bash
echo "=== PostgreSQL Setup for AITBC Coordinator API ==="
echo ""
# Create database and user
echo "Creating coordinator database..."
sudo -u postgres psql -c "CREATE DATABASE aitbc_coordinator;"
sudo -u postgres psql -c "CREATE USER aitbc_user WITH PASSWORD 'aitbc_password';"
sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE aitbc_coordinator TO aitbc_user;"
# Grant schema permissions
sudo -u postgres psql -d aitbc_coordinator -c 'ALTER SCHEMA public OWNER TO aitbc_user;'
sudo -u postgres psql -d aitbc_coordinator -c 'GRANT CREATE ON SCHEMA public TO aitbc_user;'
# Test connection
echo "Testing connection..."
sudo -u postgres psql -c "\l" | grep aitbc_coordinator
echo ""
echo "✅ PostgreSQL setup complete for Coordinator API!"
echo ""
echo "Connection details:"
echo " Database: aitbc_coordinator"
echo " User: aitbc_user"
echo " Host: localhost"
echo " Port: 5432"
echo ""
echo "You can now run the migration script."

View File

@@ -0,0 +1,57 @@
"""Coordinator API configuration with PostgreSQL support"""
from pydantic_settings import BaseSettings
from typing import Optional
class Settings(BaseSettings):
"""Application settings"""
# API Configuration
api_host: str = "0.0.0.0"
api_port: int = 8000
api_prefix: str = "/v1"
debug: bool = False
# Database Configuration
database_url: str = "postgresql://aitbc_user:aitbc_password@localhost:5432/aitbc_coordinator"
# JWT Configuration
jwt_secret: str = "your-secret-key-change-in-production"
jwt_algorithm: str = "HS256"
jwt_expiration_hours: int = 24
# Job Configuration
default_job_ttl_seconds: int = 3600 # 1 hour
max_job_ttl_seconds: int = 86400 # 24 hours
job_cleanup_interval_seconds: int = 300 # 5 minutes
# Miner Configuration
miner_heartbeat_timeout_seconds: int = 120 # 2 minutes
miner_max_inflight: int = 10
# Marketplace Configuration
marketplace_offer_ttl_seconds: int = 3600 # 1 hour
# Wallet Configuration
wallet_rpc_url: str = "http://localhost:9080"
# CORS Configuration
cors_origins: list[str] = [
"http://localhost:3000",
"http://localhost:8080",
"https://aitbc.bubuit.net",
"https://aitbc.bubuit.net:8080"
]
# Logging Configuration
log_level: str = "INFO"
log_format: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
class Config:
env_file = ".env"
env_file_encoding = "utf-8"
# Create global settings instance
settings = Settings()
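Since Settings extends BaseSettings, every field above can be overridden through the .env file or the process environment without code changes; field names map case-insensitively to environment variables. A sketch (values illustrative):

```python
# In .env or the environment:
#   DATABASE_URL=postgresql://aitbc_user:other-password@db-host:5432/aitbc_coordinator
#   DEBUG=true
#   JWT_SECRET=a-long-random-value

from config_pg import settings  # import path assumed; database_pg.py uses "from .config_pg import settings"

print(settings.database_url)  # reflects DATABASE_URL when set, else the default above
print(settings.debug)         # pydantic coerces "true" to the bool True
```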

View File

@@ -15,6 +15,7 @@ from .routers import (
services,
marketplace_offers,
zk_applications,
+    explorer,
)
from .routers import zk_applications
from .routers.governance import router as governance
@@ -51,6 +52,7 @@ def create_app() -> FastAPI:
app.include_router(zk_applications.router, prefix="/v1")
app.include_router(governance, prefix="/v1")
app.include_router(partners, prefix="/v1")
+    app.include_router(explorer, prefix="/v1")
# Add Prometheus metrics endpoint
metrics_app = make_asgi_app()

View File

@@ -1,4 +1,5 @@
from fastapi import APIRouter, Depends, HTTPException, status
+from sqlmodel import select
from ..deps import require_admin_key
from ..services import JobService, MinerService
@@ -53,7 +54,7 @@ async def list_miners(session: SessionDep, admin_key: str = Depends(require_admi
miner_service = MinerService(session)
miners = [
{
"miner_id": record.miner_id,
"miner_id": record.id,
"status": record.status,
"inflight": record.inflight,
"concurrency": record.concurrency,

View File

@@ -2,6 +2,7 @@ from fastapi import APIRouter, Depends, HTTPException, status
from ..deps import require_client_key
from ..schemas import JobCreate, JobView, JobResult
+from ..types import JobState
from ..services import JobService
from ..storage import SessionDep

View File

@@ -73,7 +73,7 @@ async def submit_result(
duration_ms = int((datetime.utcnow() - job.requested_at).total_seconds() * 1000)
metrics["duration_ms"] = duration_ms
-    receipt = receipt_service.create_receipt(job, miner_id, req.result, metrics)
+    receipt = await receipt_service.create_receipt(job, miner_id, req.result, metrics)
job.receipt = receipt
job.receipt_id = receipt["receipt_id"] if receipt else None
session.add(job)

View File

@@ -20,9 +20,9 @@ class PartnerRegister(BaseModel):
"""Register a new partner application"""
name: str = Field(..., min_length=3, max_length=100)
description: str = Field(..., min_length=10, max_length=500)
-    website: str = Field(..., regex=r'^https?://')
-    contact: str = Field(..., regex=r'^[^@]+@[^@]+\.[^@]+$')
-    integration_type: str = Field(..., regex="^(explorer|analytics|wallet|exchange|other)$")
+    website: str = Field(..., pattern=r'^https?://')
+    contact: str = Field(..., pattern=r'^[^@]+@[^@]+\.[^@]+$')
+    integration_type: str = Field(..., pattern="^(explorer|analytics|wallet|exchange|other)$")
class PartnerResponse(BaseModel):
@@ -36,7 +36,7 @@ class PartnerResponse(BaseModel):
class WebhookCreate(BaseModel):
"""Create a webhook subscription"""
-    url: str = Field(..., regex=r'^https?://')
+    url: str = Field(..., pattern=r'^https?://')
events: List[str] = Field(..., min_items=1)
secret: Optional[str] = Field(max_length=100)

View File

@@ -195,6 +195,7 @@ class ReceiptSummary(BaseModel):
model_config = ConfigDict(populate_by_name=True)
receiptId: str
+    jobId: Optional[str] = None
miner: str
coordinator: str
issuedAt: datetime

View File

@@ -50,7 +50,7 @@ class ExplorerService:
height=height,
hash=job.id,
timestamp=job.requested_at,
-                    tx_count=1,
+                    txCount=1,
proposer=proposer,
)
)
@@ -71,13 +71,22 @@ class ExplorerService:
for index, job in enumerate(jobs):
height = _DEFAULT_HEIGHT_BASE + offset + index
status_label = _STATUS_LABELS.get(job.state, job.state.value.title())
value = job.payload.get("value") if isinstance(job.payload, dict) else None
if value is None:
value_str = "0"
elif isinstance(value, (int, float)):
value_str = f"{value}"
else:
value_str = str(value)
# Try to get payment amount from receipt
value_str = "0"
if job.receipt and isinstance(job.receipt, dict):
price = job.receipt.get("price")
if price is not None:
value_str = f"{price}"
# Fallback to payload value if no receipt
if value_str == "0":
value = job.payload.get("value") if isinstance(job.payload, dict) else None
if value is not None:
if isinstance(value, (int, float)):
value_str = f"{value}"
else:
value_str = str(value)
items.append(
TransactionSummary(
@@ -100,14 +109,16 @@ class ExplorerService:
address_map: dict[str, dict[str, object]] = defaultdict(
lambda: {
"address": "",
"balance": "0",
"balance": 0.0,
"tx_count": 0,
"last_active": datetime.min,
"recent_transactions": deque(maxlen=5),
"earned": 0.0,
"spent": 0.0,
}
)
-        def touch(address: Optional[str], tx_id: str, when: datetime, value_hint: Optional[str] = None) -> None:
+        def touch(address: Optional[str], tx_id: str, when: datetime, earned: float = 0.0, spent: float = 0.0) -> None:
if not address:
return
entry = address_map[address]
@@ -115,18 +126,27 @@ class ExplorerService:
entry["tx_count"] = int(entry["tx_count"]) + 1
if when > entry["last_active"]:
entry["last_active"] = when
-            if value_hint:
-                entry["balance"] = value_hint
+            # Track earnings and spending
+            entry["earned"] = float(entry["earned"]) + earned
+            entry["spent"] = float(entry["spent"]) + spent
+            entry["balance"] = float(entry["earned"]) - float(entry["spent"])
recent: deque[str] = entry["recent_transactions"] # type: ignore[assignment]
recent.appendleft(tx_id)
for job in jobs:
value = job.payload.get("value") if isinstance(job.payload, dict) else None
value_hint: Optional[str] = None
if value is not None:
value_hint = str(value)
touch(job.client_id, job.id, job.requested_at, value_hint=value_hint)
touch(job.assigned_miner_id, job.id, job.requested_at)
# Get payment amount from receipt if available
price = 0.0
if job.receipt and isinstance(job.receipt, dict):
receipt_price = job.receipt.get("price")
if receipt_price is not None:
try:
price = float(receipt_price)
except (TypeError, ValueError):
pass
# Miner earns, client spends
touch(job.assigned_miner_id, job.id, job.requested_at, earned=price)
touch(job.client_id, job.id, job.requested_at, spent=price)
sorted_addresses = sorted(
address_map.values(),
@@ -138,7 +158,7 @@ class ExplorerService:
items = [
AddressSummary(
address=entry["address"],
balance=str(entry["balance"]),
balance=f"{float(entry['balance']):.6f}",
txCount=int(entry["tx_count"]),
lastActive=entry["last_active"],
recentTransactions=list(entry["recent_transactions"]),
@@ -164,19 +184,24 @@ class ExplorerService:
items: list[ReceiptSummary] = []
for row in rows:
payload = row.payload or {}
miner = payload.get("miner") or payload.get("miner_id") or "unknown"
coordinator = payload.get("coordinator") or payload.get("coordinator_id") or "unknown"
# Extract miner from provider field (receipt format) or fallback
miner = payload.get("provider") or payload.get("miner") or payload.get("miner_id") or "unknown"
# Extract client as coordinator (receipt format) or fallback
coordinator = payload.get("client") or payload.get("coordinator") or payload.get("coordinator_id") or "unknown"
status = payload.get("status") or payload.get("state") or "Unknown"
+            # Get job_id from payload
+            job_id_from_payload = payload.get("job_id") or row.job_id
items.append(
ReceiptSummary(
-                    receipt_id=row.receipt_id,
+                    receiptId=row.receipt_id,
miner=miner,
coordinator=coordinator,
-                    issued_at=row.created_at,
+                    issuedAt=row.created_at,
status=status,
payload=payload,
+                    jobId=job_id_from_payload,
)
)
resolved_job_id = job_id or "all"
-        return ReceiptListResponse(job_id=resolved_job_id, items=items)
+        return ReceiptListResponse(jobId=resolved_job_id, items=items)
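Concretely, under the new accounting a single job whose receipt carries price 1.8 credits the miner and debits the client symmetrically; a hedged recomputation of the resulting balances:

```python
# One job, receipt price 1.8: miner earns, client spends (per the touch() calls above)
price = 1.8
miner = {"earned": price, "spent": 0.0}
client = {"earned": 0.0, "spent": price}

def fmt(entry):
    # Matches the AddressSummary balance formatting (six decimal places)
    return f"{entry['earned'] - entry['spent']:.6f}"

assert fmt(miner) == "1.800000"
assert fmt(client) == "-1.800000"
```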

View File

@@ -101,7 +101,7 @@ class JobService:
return None
def _ensure_not_expired(self, job: Job) -> Job:
-        if job.state == JobState.queued and job.expires_at <= datetime.utcnow():
+        if job.state in {JobState.queued, JobState.running} and job.expires_at <= datetime.utcnow():
job.state = JobState.expired
job.error = "job expired"
self.session.add(job)

View File

@@ -32,6 +32,7 @@ class MinerService:
miner.concurrency = payload.concurrency
miner.region = payload.region
miner.session_token = session_token
+        miner.inflight = 0
miner.last_heartbeat = datetime.utcnow()
miner.status = "ONLINE"
self.session.commit()

View File

@@ -35,24 +35,60 @@ class ReceiptService:
) -> Dict[str, Any] | None:
if self._signer is None:
return None
+        metrics = result_metrics or {}
+        result_payload = job_result or {}
+        unit_type = _first_present([
+            metrics.get("unit_type"),
+            result_payload.get("unit_type"),
+        ], default="gpu_seconds")
+        units = _coerce_float(_first_present([
+            metrics.get("units"),
+            result_payload.get("units"),
+        ]))
+        if units is None:
+            duration_ms = _coerce_float(metrics.get("duration_ms"))
+            if duration_ms is not None:
+                units = duration_ms / 1000.0
+            else:
+                duration_seconds = _coerce_float(_first_present([
+                    metrics.get("duration_seconds"),
+                    metrics.get("compute_time"),
+                    result_payload.get("execution_time"),
+                    result_payload.get("duration"),
+                ]))
+                units = duration_seconds
+        if units is None:
+            units = 0.0
+        unit_price = _coerce_float(_first_present([
+            metrics.get("unit_price"),
+            result_payload.get("unit_price"),
+        ]))
+        if unit_price is None:
+            unit_price = 0.02
+        price = _coerce_float(_first_present([
+            metrics.get("price"),
+            result_payload.get("price"),
+            metrics.get("aitbc_earned"),
+            result_payload.get("aitbc_earned"),
+            metrics.get("cost"),
+            result_payload.get("cost"),
+        ]))
+        if price is None:
+            price = round(units * unit_price, 6)
payload = {
"version": "1.0",
"receipt_id": token_hex(16),
"job_id": job.id,
"provider": miner_id,
"client": job.client_id,
"units": _first_present([
(result_metrics or {}).get("units"),
(job_result or {}).get("units"),
], default=0.0),
"unit_type": _first_present([
(result_metrics or {}).get("unit_type"),
(job_result or {}).get("unit_type"),
], default="gpu_seconds"),
"price": _first_present([
(result_metrics or {}).get("price"),
(job_result or {}).get("price"),
]),
"status": job.state.value,
"units": units,
"unit_type": unit_type,
"unit_price": unit_price,
"price": price,
"started_at": int(job.requested_at.timestamp()) if job.requested_at else int(datetime.utcnow().timestamp()),
"completed_at": int(datetime.utcnow().timestamp()),
"metadata": {
@@ -105,3 +141,13 @@ def _first_present(values: list[Optional[Any]], default: Optional[Any] = None) -
if value is not None:
return value
return default
+def _coerce_float(value: Any) -> Optional[float]:
+    """Coerce a value to float, returning None if not possible"""
+    if value is None:
+        return None
+    try:
+        return float(value)
+    except (TypeError, ValueError):
+        return None
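To make the new fallback chain concrete: when a result reports only duration_ms and no explicit units or price, the receipt derives units from the duration and prices them at the 0.02 default. A hedged recomputation for a 90-second job:

```python
metrics = {"duration_ms": 90_000}        # only a duration is reported

units = metrics["duration_ms"] / 1000.0  # no explicit units -> 90.0 gpu_seconds
unit_price = 0.02                        # default when neither metrics nor result set one
price = round(units * unit_price, 6)     # 90.0 * 0.02 = 1.8

assert (units, unit_price, price) == (90.0, 0.02, 1.8)
```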

View File

@@ -0,0 +1,223 @@
"""PostgreSQL database module for Coordinator API"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, Session, declarative_base  # declarative_base lives in sqlalchemy.orm since 1.4
import psycopg2
from psycopg2.extras import RealDictCursor
from typing import Generator, Optional, Dict, Any, List
import json
from datetime import datetime
from decimal import Decimal
from .config_pg import settings
# SQLAlchemy setup for complex queries
engine = create_engine(
settings.database_url,
echo=settings.debug,
pool_pre_ping=True,
pool_recycle=300,
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
# Direct PostgreSQL connection for performance
def get_pg_connection():
"""Get direct PostgreSQL connection"""
return psycopg2.connect(
host="localhost",
database="aitbc_coordinator",
user="aitbc_user",
password="aitbc_password",
port=5432,
cursor_factory=RealDictCursor
)
def get_db() -> Generator[Session, None, None]:
"""Get database session"""
db = SessionLocal()
try:
yield db
finally:
db.close()
class PostgreSQLAdapter:
"""PostgreSQL adapter for high-performance operations"""
def __init__(self):
self.connection = get_pg_connection()
    def execute_query(self, query: str, params: Optional[tuple] = None) -> List[Dict[str, Any]]:
"""Execute a query and return results"""
with self.connection.cursor() as cursor:
cursor.execute(query, params)
return cursor.fetchall()
    def execute_update(self, query: str, params: Optional[tuple] = None) -> int:
"""Execute an update/insert/delete query"""
with self.connection.cursor() as cursor:
cursor.execute(query, params)
self.connection.commit()
return cursor.rowcount
def execute_batch(self, query: str, params_list: List[tuple]) -> int:
"""Execute batch insert/update"""
with self.connection.cursor() as cursor:
cursor.executemany(query, params_list)
self.connection.commit()
return cursor.rowcount
def get_job_by_id(self, job_id: str) -> Optional[Dict[str, Any]]:
"""Get job by ID"""
query = "SELECT * FROM job WHERE id = %s"
results = self.execute_query(query, (job_id,))
return results[0] if results else None
def get_available_miners(self, region: Optional[str] = None) -> List[Dict[str, Any]]:
"""Get available miners"""
if region:
query = """
SELECT * FROM miner
WHERE status = 'active'
AND inflight < concurrency
AND (region = %s OR region IS NULL)
ORDER BY last_heartbeat DESC
"""
return self.execute_query(query, (region,))
else:
query = """
SELECT * FROM miner
WHERE status = 'active'
AND inflight < concurrency
ORDER BY last_heartbeat DESC
"""
return self.execute_query(query)
def get_pending_jobs(self, limit: int = 100) -> List[Dict[str, Any]]:
"""Get pending jobs"""
query = """
SELECT * FROM job
WHERE state = 'pending'
AND expires_at > NOW()
ORDER BY requested_at ASC
LIMIT %s
"""
return self.execute_query(query, (limit,))
    def update_job_state(self, job_id: str, state: str, **kwargs) -> bool:
        """Update job state; extra kwargs become SET columns (keys must be trusted identifiers)"""
        set_clauses = ["state = %s"]
        params: List[Any] = [state]
        for key, value in kwargs.items():
            set_clauses.append(f"{key} = %s")
            params.append(value)
        params.append(job_id)  # the WHERE placeholder binds last
        query = f"""
            UPDATE job
            SET {', '.join(set_clauses)}, updated_at = NOW()
            WHERE id = %s
        """
        return self.execute_update(query, tuple(params)) > 0
def get_marketplace_offers(self, status: str = "active") -> List[Dict[str, Any]]:
"""Get marketplace offers"""
query = """
SELECT * FROM marketplaceoffer
WHERE status = %s
ORDER BY price ASC, created_at DESC
"""
return self.execute_query(query, (status,))
def get_user_wallets(self, user_id: str) -> List[Dict[str, Any]]:
"""Get user wallets"""
query = """
SELECT * FROM wallet
WHERE user_id = %s
ORDER BY created_at DESC
"""
return self.execute_query(query, (user_id,))
    def create_job(self, job_data: Dict[str, Any]) -> str:
        """Create a new job"""
        query = """
            INSERT INTO job (id, client_id, state, payload, constraints,
                           ttl_seconds, requested_at, expires_at)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
            RETURNING id
        """
        with self.connection.cursor() as cursor:
            cursor.execute(query, (
                job_data['id'],
                job_data['client_id'],
                job_data['state'],
                json.dumps(job_data['payload']),
                json.dumps(job_data.get('constraints', {})),
                job_data['ttl_seconds'],
                job_data['requested_at'],
                job_data['expires_at']
            ))
            new_id = cursor.fetchone()['id']
        # execute_query never commits, so commit the INSERT explicitly here
        self.connection.commit()
        return new_id
def cleanup_expired_jobs(self) -> int:
"""Clean up expired jobs"""
query = """
UPDATE job
SET state = 'expired', updated_at = NOW()
WHERE state = 'pending'
AND expires_at < NOW()
"""
return self.execute_update(query)
def get_miner_stats(self, miner_id: str) -> Optional[Dict[str, Any]]:
"""Get miner statistics"""
query = """
SELECT
COUNT(*) as total_jobs,
COUNT(CASE WHEN state = 'completed' THEN 1 END) as completed_jobs,
COUNT(CASE WHEN state = 'failed' THEN 1 END) as failed_jobs,
AVG(CASE WHEN state = 'completed' THEN EXTRACT(EPOCH FROM (updated_at - requested_at)) END) as avg_duration_seconds
FROM job
WHERE assigned_miner_id = %s
"""
results = self.execute_query(query, (miner_id,))
return results[0] if results else None
def close(self):
"""Close the connection"""
if self.connection:
self.connection.close()
# Global adapter instance
db_adapter = PostgreSQLAdapter()
# Database initialization
def init_db():
"""Initialize database tables"""
# Import models here to avoid circular imports
from .models import Base
# Create all tables
Base.metadata.create_all(bind=engine)
print("✅ PostgreSQL database initialized successfully!")
# Health check
def check_db_health() -> Dict[str, Any]:
"""Check database health"""
try:
result = db_adapter.execute_query("SELECT 1 as health_check")
return {
"status": "healthy",
"database": "postgresql",
"timestamp": datetime.utcnow().isoformat()
}
except Exception as e:
return {
"status": "unhealthy",
"error": str(e),
"timestamp": datetime.utcnow().isoformat()
}
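A short usage sketch of the adapter and health check defined above (the import path is assumed; the schema comes from the migration script):

```python
from database_pg import db_adapter, check_db_health  # import path assumed

print(check_db_health())  # {"status": "healthy", ...} when PostgreSQL is reachable

# Dispatch sketch: pair pending jobs with available miners
for job in db_adapter.get_pending_jobs(limit=10):
    miners = db_adapter.get_available_miners()
    if miners:
        db_adapter.update_job_state(job["id"], "assigned",
                                    assigned_miner_id=miners[0]["id"])
```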