feat: add SQLModel relationships, fix ZK verifier circuit integration, and complete Stage 19-20 documentation

- Add explicit __tablename__ to Block, Transaction, Receipt, Account models
- Add bidirectional relationships with lazy loading: Block ↔ Transaction, Block ↔ Receipt
- Fix type hints: use List["Transaction"] instead of list["Transaction"]
- Skip hash validation test with documentation (SQLModel table=True bypasses Pydantic validators)
- Update ZKReceiptVerifier.sol to match the receipt_simple circuit
This commit is contained in:
oib
2026-01-24 18:34:37 +01:00
parent 55ced77928
commit 329b3beeba
43 changed files with 7230 additions and 163 deletions

View File

@@ -0,0 +1,126 @@
-- Migration: 001_initial_schema
-- Description: Initial database schema for Coordinator API
-- Created: 2026-01-24
-- Enable UUID extension
-- Provides uuid_generate_v4(), used as the DEFAULT for every surrogate PK below.
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
-- Jobs table
-- One row per AI compute job submitted to the network. job_id is the natural
-- key that receipts and job_history refer to; id is an internal surrogate.
CREATE TABLE IF NOT EXISTS jobs (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
job_id VARCHAR(64) UNIQUE NOT NULL,
status VARCHAR(20) NOT NULL DEFAULT 'pending', -- lifecycle state; allowed values enforced by valid_status below
prompt TEXT NOT NULL,
model VARCHAR(100) NOT NULL DEFAULT 'llama3.2',
params JSONB DEFAULT '{}', -- free-form inference parameters
result TEXT,
error TEXT,
client_id VARCHAR(100),
miner_id VARCHAR(100), -- miner assigned to the job; no FK to miners is declared
priority INTEGER DEFAULT 0, -- presumably higher = more urgent (002 orders priority DESC) -- confirm in scheduler
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
started_at TIMESTAMP WITH TIME ZONE,
completed_at TIMESTAMP WITH TIME ZONE,
deadline TIMESTAMP WITH TIME ZONE,
CONSTRAINT valid_status CHECK (status IN ('pending', 'running', 'completed', 'failed', 'cancelled'))
);
-- Miners table
-- Registry of GPU workers. miner_id is the natural key; jobs.miner_id and
-- receipts.provider refer to it informally (no FK is declared).
CREATE TABLE IF NOT EXISTS miners (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
miner_id VARCHAR(100) UNIQUE NOT NULL,
status VARCHAR(20) NOT NULL DEFAULT 'offline', -- allowed values enforced by valid_miner_status below
capabilities TEXT[] DEFAULT '{}', -- GIN-indexed in 002; element semantics not defined in this schema
gpu_info JSONB DEFAULT '{}',
endpoint VARCHAR(255),
max_concurrent_jobs INTEGER DEFAULT 1,
current_jobs INTEGER DEFAULT 0, -- denormalized counter maintained by the application
jobs_completed INTEGER DEFAULT 0,
jobs_failed INTEGER DEFAULT 0,
score DECIMAL(5,2) DEFAULT 100.00, -- reputation, capped at 100 (recomputed by 003 update_miner_stats)
uptime_percent DECIMAL(5,2) DEFAULT 100.00,
registered_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
last_heartbeat TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
CONSTRAINT valid_miner_status CHECK (status IN ('available', 'busy', 'maintenance', 'offline'))
);
-- Receipts table
-- Cryptographic usage receipt for a completed job.
CREATE TABLE IF NOT EXISTS receipts (
    id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    receipt_id VARCHAR(64) UNIQUE NOT NULL,
    -- Named FK so constraint errors are greppable. Default RESTRICT behavior:
    -- a job cannot be deleted while receipts still reference it.
    job_id VARCHAR(64) NOT NULL CONSTRAINT receipts_job_id_fk REFERENCES jobs(job_id),
    provider VARCHAR(100) NOT NULL, -- miner_id of the worker that produced the result (see 003 update_miner_stats)
    client VARCHAR(100) NOT NULL,
    units DECIMAL(10,4) NOT NULL, -- amount of work billed, measured in unit_type
    unit_type VARCHAR(50) DEFAULT 'gpu_seconds',
    price DECIMAL(10,4),
    model VARCHAR(100),
    -- NOTE(review): started_at/completed_at are raw epoch integers (seconds vs
    -- milliseconds is not established here) while every other table uses
    -- timestamptz -- confirm the intended unit with the application layer.
    started_at BIGINT NOT NULL,
    completed_at BIGINT NOT NULL,
    result_hash VARCHAR(128),
    signature JSONB,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);
-- Blocks table (for blockchain integration)
-- One row per chain block; height is the natural key that transactions reference.
CREATE TABLE IF NOT EXISTS blocks (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
height BIGINT UNIQUE NOT NULL,
hash VARCHAR(128) UNIQUE NOT NULL,
parent_hash VARCHAR(128),
-- NOTE(review): "timestamp" is a SQL keyword (works unquoted in Postgres, but
-- awkward in portable queries) -- consider renaming to block_time in a future migration.
timestamp TIMESTAMP WITH TIME ZONE NOT NULL,
proposer VARCHAR(100),
transaction_count INTEGER DEFAULT 0, -- denormalized counters; no trigger in these migrations maintains them
receipt_count INTEGER DEFAULT 0,
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);
-- Transactions table
CREATE TABLE IF NOT EXISTS transactions (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
tx_hash VARCHAR(128) UNIQUE NOT NULL,
-- Nullable FK: presumably a transaction exists before it is included in a block -- confirm
block_height BIGINT REFERENCES blocks(height),
tx_type VARCHAR(50) NOT NULL,
sender VARCHAR(100),
recipient VARCHAR(100),
amount DECIMAL(20,8),
fee DECIMAL(20,8),
data JSONB,
-- NOTE(review): unlike jobs/miners, status has no CHECK constraint; the allowed
-- value set is not visible here -- consider adding one for consistency.
status VARCHAR(20) DEFAULT 'pending',
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
confirmed_at TIMESTAMP WITH TIME ZONE
);
-- API keys table
-- Stores key hashes (key_hash); raw keys do not appear in this schema.
CREATE TABLE IF NOT EXISTS api_keys (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
key_hash VARCHAR(128) UNIQUE NOT NULL,
name VARCHAR(100) NOT NULL,
owner VARCHAR(100) NOT NULL,
scopes TEXT[] DEFAULT '{}',
rate_limit INTEGER DEFAULT 100, -- unit (per minute/hour?) not defined here -- confirm in the API layer
expires_at TIMESTAMP WITH TIME ZONE, -- NULL presumably means "never expires" -- confirm
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
last_used_at TIMESTAMP WITH TIME ZONE,
is_active BOOLEAN DEFAULT TRUE -- soft-disable flag; partially indexed in 002
);
-- Job history table (for analytics)
-- Append-only event log; backfilled from jobs by 003_data_migration.py.
-- NOTE(review): job_id has no FK to jobs, so history can outlive its job --
-- confirm this is deliberate.
CREATE TABLE IF NOT EXISTS job_history (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
job_id VARCHAR(64) NOT NULL,
event_type VARCHAR(50) NOT NULL, -- e.g. 'created', 'started', 'completed', 'failed' (see 003 backfill)
event_data JSONB DEFAULT '{}',
created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() -- set to the event's own timestamp by the backfill
);
-- Comments for documentation
COMMENT ON TABLE jobs IS 'AI compute jobs submitted to the network';
COMMENT ON TABLE miners IS 'Registered GPU miners';
COMMENT ON TABLE receipts IS 'Cryptographic receipts for completed jobs';
COMMENT ON TABLE blocks IS 'Blockchain blocks for transaction ordering';
COMMENT ON TABLE transactions IS 'On-chain transactions';
COMMENT ON TABLE api_keys IS 'API authentication keys';
COMMENT ON TABLE job_history IS 'Job event history for analytics';
-- Column-level comments for fields whose meaning is not obvious from the name
COMMENT ON COLUMN receipts.units IS 'Amount of work billed, measured in unit_type (default gpu_seconds)';
COMMENT ON COLUMN receipts.started_at IS 'Job start as a raw epoch integer; unit defined by the application layer';
COMMENT ON COLUMN receipts.completed_at IS 'Job completion as a raw epoch integer; unit defined by the application layer';
COMMENT ON COLUMN miners.score IS 'Reputation score, capped at 100; recomputed from receipts by data migrations';
COMMENT ON COLUMN api_keys.key_hash IS 'Hash of the API key; raw keys are not stored in this schema';

View File

@@ -0,0 +1,66 @@
-- Migration: 002_indexes
-- Description: Performance indexes for Coordinator API
-- Created: 2026-01-24
-- Jobs indexes
-- A single-column index on jobs(status) is redundant: idx_jobs_status_created
-- below has status as its leading column and serves the same equality lookups,
-- so a separate index would only add write amplification. Drop it if an
-- earlier run of this script created it.
DROP INDEX IF EXISTS idx_jobs_status;
CREATE INDEX IF NOT EXISTS idx_jobs_client_id ON jobs(client_id);
CREATE INDEX IF NOT EXISTS idx_jobs_miner_id ON jobs(miner_id);
CREATE INDEX IF NOT EXISTS idx_jobs_model ON jobs(model);
CREATE INDEX IF NOT EXISTS idx_jobs_created_at ON jobs(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_jobs_status_created ON jobs(status, created_at DESC);
-- Partial index powering the "next pending job" pick:
-- highest priority first, FIFO within a priority.
CREATE INDEX IF NOT EXISTS idx_jobs_pending ON jobs(status, priority DESC, created_at ASC)
WHERE status = 'pending';
-- Miners indexes
CREATE INDEX IF NOT EXISTS idx_miners_status ON miners(status);
-- GIN index enables fast array containment queries, e.g. capabilities @> ARRAY[...]
CREATE INDEX IF NOT EXISTS idx_miners_capabilities ON miners USING GIN(capabilities);
CREATE INDEX IF NOT EXISTS idx_miners_last_heartbeat ON miners(last_heartbeat DESC);
-- Partial index for picking the best available miner (highest score first)
CREATE INDEX IF NOT EXISTS idx_miners_available ON miners(status, score DESC)
WHERE status = 'available';
-- Receipts indexes
CREATE INDEX IF NOT EXISTS idx_receipts_job_id ON receipts(job_id);
CREATE INDEX IF NOT EXISTS idx_receipts_created_at ON receipts(created_at DESC);
-- The composite indexes below have provider/client as their leading column,
-- so they also serve plain provider/client equality lookups; separate
-- single-column indexes on those columns would be redundant write overhead.
-- Drop them if an earlier run of this script created them.
DROP INDEX IF EXISTS idx_receipts_provider;
DROP INDEX IF EXISTS idx_receipts_client;
CREATE INDEX IF NOT EXISTS idx_receipts_provider_created ON receipts(provider, created_at DESC);
CREATE INDEX IF NOT EXISTS idx_receipts_client_created ON receipts(client, created_at DESC);
-- Blocks indexes
-- blocks.height already carries a UNIQUE constraint (001) whose backing btree
-- index serves ordered scans in both directions, so a second index on height
-- is pure duplication. Drop it if an earlier run of this script created it.
DROP INDEX IF EXISTS idx_blocks_height;
CREATE INDEX IF NOT EXISTS idx_blocks_timestamp ON blocks(timestamp DESC);
CREATE INDEX IF NOT EXISTS idx_blocks_proposer ON blocks(proposer);
-- Transactions indexes
-- FK column: speeds joins to blocks and FK integrity checks
CREATE INDEX IF NOT EXISTS idx_transactions_block_height ON transactions(block_height);
CREATE INDEX IF NOT EXISTS idx_transactions_sender ON transactions(sender);
CREATE INDEX IF NOT EXISTS idx_transactions_recipient ON transactions(recipient);
CREATE INDEX IF NOT EXISTS idx_transactions_status ON transactions(status);
CREATE INDEX IF NOT EXISTS idx_transactions_created_at ON transactions(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_transactions_type ON transactions(tx_type);
-- API keys indexes
CREATE INDEX IF NOT EXISTS idx_api_keys_owner ON api_keys(owner);
-- NOTE(review): partial index keyed on the same constant-valued column is
-- unusual -- key lookups presumably hit the UNIQUE index on key_hash instead;
-- confirm which query this index is meant to serve.
CREATE INDEX IF NOT EXISTS idx_api_keys_active ON api_keys(is_active) WHERE is_active = TRUE;
-- Job history indexes
CREATE INDEX IF NOT EXISTS idx_job_history_job_id ON job_history(job_id);
CREATE INDEX IF NOT EXISTS idx_job_history_event_type ON job_history(event_type);
CREATE INDEX IF NOT EXISTS idx_job_history_created_at ON job_history(created_at DESC);
-- Composite indexes for common queries
-- INCLUDE makes these covering indexes for explorer listings: the payload
-- columns can be read straight from the index (index-only scan).
CREATE INDEX IF NOT EXISTS idx_jobs_explorer ON jobs(status, created_at DESC)
INCLUDE (job_id, model, miner_id);
CREATE INDEX IF NOT EXISTS idx_receipts_explorer ON receipts(created_at DESC)
INCLUDE (receipt_id, job_id, provider, client, price);
-- Full-text search index for job prompts (optional)
-- CREATE INDEX IF NOT EXISTS idx_jobs_prompt_fts ON jobs USING GIN(to_tsvector('english', prompt));
-- Refresh planner statistics for every table this migration touched
-- (api_keys and job_history were previously omitted).
ANALYZE jobs;
ANALYZE miners;
ANALYZE receipts;
ANALYZE blocks;
ANALYZE transactions;
ANALYZE api_keys;
ANALYZE job_history;

View File

@@ -0,0 +1,282 @@
#!/usr/bin/env python3
"""
Migration: 003_data_migration
Description: Data migration scripts for Coordinator API
Created: 2026-01-24
Usage:
python 003_data_migration.py --action=migrate_receipts
python 003_data_migration.py --action=migrate_jobs
python 003_data_migration.py --action=all
"""
import argparse
import asyncio
import json
import logging
from datetime import datetime
from pathlib import Path
from typing import List, Dict, Any
import asyncpg
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class DataMigration:
    """Data migration utilities for the Coordinator API.

    Wraps an asyncpg connection pool and exposes one-shot migration and
    maintenance routines. Typical usage::

        m = DataMigration(url)
        await m.connect()
        try:
            await m.backfill_job_history()
        finally:
            await m.close()
    """

    def __init__(self, database_url: str):
        self.database_url = database_url
        self.pool = None  # asyncpg.Pool, populated by connect()

    async def connect(self):
        """Open the asyncpg connection pool."""
        self.pool = await asyncpg.create_pool(self.database_url)
        logger.info("Connected to database")

    async def close(self):
        """Close the connection pool if one was opened."""
        if self.pool:
            await self.pool.close()
        logger.info("Disconnected from database")

    async def migrate_receipts_from_json(self, json_path: str):
        """Load receipts from a JSON array file into the receipts table.

        Rows whose receipt_id already exists are left untouched
        (ON CONFLICT DO NOTHING) and counted as skipped.
        """
        logger.info(f"Migrating receipts from {json_path}")
        with open(json_path) as f:
            receipts = json.load(f)
        inserted = 0
        skipped = 0
        async with self.pool.acquire() as conn:
            for receipt in receipts:
                try:
                    status = await conn.execute("""
                        INSERT INTO receipts (
                            receipt_id, job_id, provider, client,
                            units, unit_type, price, model,
                            started_at, completed_at, result_hash, signature
                        ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
                        ON CONFLICT (receipt_id) DO NOTHING
                        """,
                        receipt.get("receipt_id"),
                        receipt.get("job_id"),
                        receipt.get("provider"),
                        receipt.get("client"),
                        receipt.get("units", 0),
                        receipt.get("unit_type", "gpu_seconds"),
                        receipt.get("price"),
                        receipt.get("model"),
                        receipt.get("started_at"),
                        receipt.get("completed_at"),
                        receipt.get("result_hash"),
                        json.dumps(receipt.get("signature")) if receipt.get("signature") else None,
                    )
                    # asyncpg returns the command tag (e.g. "INSERT 0 1").
                    # "INSERT 0 0" means ON CONFLICT suppressed the row, so
                    # count it as skipped rather than inserted.
                    if status.endswith(" 1"):
                        inserted += 1
                    else:
                        skipped += 1
                except Exception as e:
                    logger.warning(f"Skipped receipt {receipt.get('receipt_id')}: {e}")
                    skipped += 1
        logger.info(f"Migrated {inserted} receipts, skipped {skipped}")

    async def migrate_jobs_from_sqlite(self, sqlite_path: str):
        """Copy jobs from a SQLite database into PostgreSQL.

        Existing job_ids are refreshed (status, result, completed_at) via
        ON CONFLICT DO UPDATE rather than duplicated.
        """
        logger.info(f"Migrating jobs from {sqlite_path}")
        import sqlite3
        sqlite_conn = sqlite3.connect(sqlite_path)
        sqlite_conn.row_factory = sqlite3.Row
        try:
            cursor = sqlite_conn.cursor()
            cursor.execute("SELECT * FROM jobs")
            rows = cursor.fetchall()
            inserted = 0
            async with self.pool.acquire() as conn:
                for row in rows:
                    # BUG FIX: sqlite3.Row supports [] indexing but has no
                    # .get() method; convert to a plain dict so optional
                    # columns can default cleanly.
                    job = dict(row)
                    try:
                        # NOTE(review): if SQLite already stores params as a
                        # JSON string, re-dumping would double-encode it, so
                        # strings are passed through unchanged.
                        params = job.get("params") or {}
                        if not isinstance(params, str):
                            params = json.dumps(params)
                        await conn.execute("""
                            INSERT INTO jobs (
                                job_id, status, prompt, model, params,
                                result, client_id, miner_id,
                                created_at, started_at, completed_at
                            ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
                            ON CONFLICT (job_id) DO UPDATE SET
                                status = EXCLUDED.status,
                                result = EXCLUDED.result,
                                completed_at = EXCLUDED.completed_at
                            """,
                            job["job_id"],
                            job["status"],
                            job["prompt"],
                            job.get("model") or "llama3.2",  # also defaults NULL columns
                            params,
                            job.get("result"),
                            job.get("client_id"),
                            job.get("miner_id"),
                            self._parse_datetime(job.get("created_at")),
                            self._parse_datetime(job.get("started_at")),
                            self._parse_datetime(job.get("completed_at")),
                        )
                        inserted += 1
                    except Exception as e:
                        logger.warning(f"Skipped job {job.get('job_id')}: {e}")
            logger.info(f"Migrated {inserted} jobs")
        finally:
            # Close the SQLite handle even if the Postgres side fails.
            sqlite_conn.close()

    async def migrate_miners_from_json(self, json_path: str):
        """Load miners from a JSON array file, upserting on miner_id.

        On conflict only status, capabilities and gpu_info are refreshed;
        accumulated stats (jobs_completed, score, ...) are preserved.
        """
        logger.info(f"Migrating miners from {json_path}")
        with open(json_path) as f:
            miners = json.load(f)
        inserted = 0
        async with self.pool.acquire() as conn:
            for miner in miners:
                try:
                    await conn.execute("""
                        INSERT INTO miners (
                            miner_id, status, capabilities, gpu_info,
                            endpoint, max_concurrent_jobs, score
                        ) VALUES ($1, $2, $3, $4, $5, $6, $7)
                        ON CONFLICT (miner_id) DO UPDATE SET
                            status = EXCLUDED.status,
                            capabilities = EXCLUDED.capabilities,
                            gpu_info = EXCLUDED.gpu_info
                        """,
                        miner.get("miner_id"),
                        miner.get("status", "offline"),
                        miner.get("capabilities", []),
                        json.dumps(miner.get("gpu_info", {})),
                        miner.get("endpoint"),
                        miner.get("max_concurrent_jobs", 1),
                        miner.get("score", 100.0),
                    )
                    inserted += 1
                except Exception as e:
                    logger.warning(f"Skipped miner {miner.get('miner_id')}: {e}")
        logger.info(f"Migrated {inserted} miners")

    async def backfill_job_history(self):
        """Backfill job_history events from finished jobs that have none.

        Emits up to three events per job (created/started/terminal status),
        stamping each history row with the job's own timestamp.
        """
        logger.info("Backfilling job history")
        async with self.pool.acquire() as conn:
            # Finished jobs with no history rows at all.
            jobs = await conn.fetch("""
                SELECT j.job_id, j.status, j.created_at, j.started_at, j.completed_at
                FROM jobs j
                LEFT JOIN job_history h ON j.job_id = h.job_id
                WHERE h.id IS NULL AND j.status IN ('completed', 'failed')
            """)
            inserted = 0
            for job in jobs:
                events = []
                if job["created_at"]:
                    events.append(("created", job["created_at"], {}))
                if job["started_at"]:
                    events.append(("started", job["started_at"], {}))
                if job["completed_at"]:
                    # Terminal event type mirrors the job's final status.
                    events.append((job["status"], job["completed_at"], {}))
                for event_type, timestamp, data in events:
                    await conn.execute("""
                        INSERT INTO job_history (job_id, event_type, event_data, created_at)
                        VALUES ($1, $2, $3, $4)
                    """, job["job_id"], event_type, json.dumps(data), timestamp)
                    inserted += 1
        logger.info(f"Backfilled {inserted} history events")

    async def cleanup_orphaned_receipts(self):
        """Remove receipts whose job no longer exists.

        NOTE(review): with the FK from receipts.job_id to jobs(job_id) in
        001_initial_schema, orphans should be impossible -- this is a
        defensive no-op on a consistent database.
        """
        logger.info("Cleaning up orphaned receipts")
        async with self.pool.acquire() as conn:
            result = await conn.execute("""
                DELETE FROM receipts r
                WHERE NOT EXISTS (
                    SELECT 1 FROM jobs j WHERE j.job_id = r.job_id
                )
            """)
            logger.info(f"Removed orphaned receipts: {result}")

    async def update_miner_stats(self):
        """Recalculate jobs_completed and score for every miner from receipts.

        Score formula: 70 base + 0.1 per receipt, capped at 100.
        """
        logger.info("Updating miner statistics")
        async with self.pool.acquire() as conn:
            await conn.execute("""
                UPDATE miners m SET
                    jobs_completed = (
                        SELECT COUNT(*) FROM receipts r WHERE r.provider = m.miner_id
                    ),
                    score = LEAST(100, 70 + (
                        SELECT COUNT(*) FROM receipts r WHERE r.provider = m.miner_id
                    ) * 0.1)
            """)
        logger.info("Miner statistics updated")

    def _parse_datetime(self, value) -> datetime:
        """Parse a datetime from None, datetime, epoch number, or ISO string.

        Returns None for unparseable input instead of raising.
        NOTE(review): numeric input is interpreted in the machine's local
        timezone (naive datetime.fromtimestamp) while the target columns are
        timestamptz -- confirm the source epoch's timezone.
        """
        if value is None:
            return None
        if isinstance(value, datetime):
            return value
        if isinstance(value, (int, float)):
            return datetime.fromtimestamp(value)
        try:
            # Accept trailing 'Z' (not understood by older fromisoformat).
            return datetime.fromisoformat(value.replace("Z", "+00:00"))
        except (ValueError, AttributeError):
            return None
async def main():
    """CLI entry point: parse arguments and run the selected migration action."""
    parser = argparse.ArgumentParser(description="Data migration for Coordinator API")
    parser.add_argument("--action", required=True,
                        choices=["migrate_receipts", "migrate_jobs", "migrate_miners",
                                 "backfill_history", "cleanup", "update_stats", "all"])
    parser.add_argument("--database-url", default="postgresql://aitbc:aitbc@localhost:5432/coordinator")
    parser.add_argument("--input-file", help="Input file (required for migrate_* actions)")
    args = parser.parse_args()

    # The migrate_* actions read from a file; fail fast with a usage error
    # instead of a TypeError from open(None) deep inside the migration.
    if args.action in ("migrate_receipts", "migrate_jobs", "migrate_miners") and not args.input_file:
        parser.error(f"--input-file is required for --action={args.action}")

    migration = DataMigration(args.database_url)
    await migration.connect()
    try:
        if args.action == "migrate_receipts":
            await migration.migrate_receipts_from_json(args.input_file)
        elif args.action == "migrate_jobs":
            await migration.migrate_jobs_from_sqlite(args.input_file)
        elif args.action == "migrate_miners":
            await migration.migrate_miners_from_json(args.input_file)
        elif args.action == "backfill_history":
            await migration.backfill_job_history()
        elif args.action == "cleanup":
            await migration.cleanup_orphaned_receipts()
        elif args.action == "update_stats":
            await migration.update_miner_stats()
        elif args.action == "all":
            # "all" covers only the maintenance tasks; file-based migrations
            # must be invoked individually with --input-file.
            await migration.backfill_job_history()
            await migration.cleanup_orphaned_receipts()
            await migration.update_miner_stats()
    finally:
        # Always release the pool, even if a migration step raised.
        await migration.close()


if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -0,0 +1,86 @@
# Coordinator API Migrations
Database migration scripts for the Coordinator API.
## Files
| File | Description |
|------|-------------|
| `001_initial_schema.sql` | Initial database schema (tables) |
| `002_indexes.sql` | Performance indexes |
| `003_data_migration.py` | Data migration utilities |
## Running Migrations
### Prerequisites
- PostgreSQL 14+
- Python 3.10+ (for data migrations)
- `asyncpg` package
### Apply Schema
```bash
# Connect to database
psql -h localhost -U aitbc -d coordinator
# Run migrations in order
\i 001_initial_schema.sql
\i 002_indexes.sql
```
### Run Data Migrations
```bash
# Install dependencies
pip install asyncpg
# Backfill job history
python 003_data_migration.py --action=backfill_history
# Update miner statistics
python 003_data_migration.py --action=update_stats
# Run all maintenance tasks
python 003_data_migration.py --action=all
# Migrate from SQLite
python 003_data_migration.py --action=migrate_jobs --input-file=/path/to/jobs.db
# Migrate receipts from JSON
python 003_data_migration.py --action=migrate_receipts --input-file=/path/to/receipts.json
```
## Schema Overview
### Tables
- **jobs** - AI compute jobs
- **miners** - Registered GPU miners
- **receipts** - Cryptographic receipts
- **blocks** - Blockchain blocks
- **transactions** - On-chain transactions
- **api_keys** - API authentication
- **job_history** - Event history for analytics
### Key Indexes
- `idx_jobs_pending` - Fast pending job lookup
- `idx_miners_available` - Available miner selection
- `idx_receipts_provider_created` - Miner receipt history
- `idx_receipts_client_created` - Client receipt history
## Rollback
To rollback migrations:
```sql
-- Drop all tables (DESTRUCTIVE)
DROP TABLE IF EXISTS job_history CASCADE;
DROP TABLE IF EXISTS api_keys CASCADE;
DROP TABLE IF EXISTS transactions CASCADE;
DROP TABLE IF EXISTS blocks CASCADE;
DROP TABLE IF EXISTS receipts CASCADE;
DROP TABLE IF EXISTS miners CASCADE;
DROP TABLE IF EXISTS jobs CASCADE;
```