Files
aitbc/apps/pool-hub/migrations/versions/a58c1f3b3e87_initial_schema.py
aitbc e22d864944
Some checks failed
API Endpoint Tests / test-api-endpoints (push) Successful in 11s
CLI Tests / test-cli (push) Failing after 7s
Documentation Validation / validate-docs (push) Successful in 8s
Documentation Validation / validate-policies-strict (push) Successful in 3s
Integration Tests / test-service-integration (push) Successful in 38s
Python Tests / test-python (push) Successful in 11s
Security Scanning / security-scan (push) Successful in 29s
Multi-Node Blockchain Health Monitoring / health-check (push) Successful in 1s
feat: implement CLI blockchain features and pool hub enhancements
CLI Blockchain Features:
- Added block operations: import, export, import-chain, blocks-range
- Added messaging system commands (deploy, state, topics, create-topic, messages, post, vote, search, reputation, moderate)
- Added network force-sync operation
- Replaced marketplace handlers with actual RPC calls
- Replaced AI handlers with actual RPC calls
- Added account operations (account get)
- Added transaction query operations
- Added mempool query operations
- Created keystore_auth.py for authentication
- Removed extended features interception
- All handlers use keystore credentials for authenticated endpoints

Pool Hub Enhancements:
- Added SLA monitoring and capacity tables
- Added billing integration service
- Added SLA collector service
- Added SLA router endpoints
- Updated pool hub models and settings
- Added integration tests for billing and SLA
- Updated documentation with SLA monitoring guide
2026-04-22 15:59:00 +02:00

104 lines
4.0 KiB
Python
Executable File

"""initial schema
Revision ID: a58c1f3b3e87
Revises:
Create Date: 2025-09-27 12:07:40.000000
"""
from __future__ import annotations
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "a58c1f3b3e87"  # unique ID of this migration in the revision graph
down_revision = None  # None = this is the root (initial) migration
branch_labels = None  # no named version branches
depends_on = None  # no dependency on migrations from other version locations
def upgrade() -> None:
    """Create the initial pool-hub schema.

    Tables (in creation order, parents before FK children):
    ``miners``, ``miner_status``, ``match_requests``, ``match_results``,
    ``feedback`` — plus lookup indexes on ``match_results.request_id``
    and on ``feedback``'s ``miner_id`` / ``job_id`` columns.
    """

    def _created_at() -> sa.Column:
        # Server-side creation timestamp shared by several tables;
        # NOW() is evaluated by the database, not the application.
        return sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("NOW()"),
        )

    # Registered miners: identity, advertised hardware, pricing and trust.
    op.create_table(
        "miners",
        sa.Column("miner_id", sa.String(length=64), primary_key=True),
        # Only a hash of the API key is stored, never the key itself.
        sa.Column("api_key_hash", sa.String(length=128), nullable=False),
        _created_at(),
        sa.Column("last_seen_at", sa.DateTime(timezone=True)),
        sa.Column("addr", sa.String(length=256)),
        sa.Column("proto", sa.String(length=32)),
        sa.Column("gpu_vram_gb", sa.Float()),
        sa.Column("gpu_name", sa.String(length=128)),
        sa.Column("cpu_cores", sa.Integer()),
        sa.Column("ram_gb", sa.Float()),
        sa.Column("max_parallel", sa.Integer()),
        sa.Column("base_price", sa.Float()),
        sa.Column("tags", sa.JSON()),
        sa.Column("capabilities", sa.JSON()),
        # New miners start at a neutral 0.5 trust score.
        sa.Column("trust_score", sa.Float(), server_default="0.5"),
        sa.Column("region", sa.String(length=64)),
    )

    # Live per-miner status; one row per miner, removed with the miner.
    op.create_table(
        "miner_status",
        sa.Column(
            "miner_id",
            sa.String(length=64),
            sa.ForeignKey("miners.miner_id", ondelete="CASCADE"),
            primary_key=True,
        ),
        sa.Column("queue_len", sa.Integer(), server_default="0"),
        sa.Column("busy", sa.Boolean(), server_default=sa.text("false")),
        sa.Column("avg_latency_ms", sa.Integer()),
        sa.Column("temp_c", sa.Integer()),
        sa.Column("mem_free_gb", sa.Float()),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("NOW()"),
        ),
    )

    # Incoming matchmaking requests (UUID primary key, 36-char canonical form).
    op.create_table(
        "match_requests",
        sa.Column("id", sa.String(length=36), primary_key=True),
        sa.Column("job_id", sa.String(length=64), nullable=False),
        sa.Column("requirements", sa.JSON(), nullable=False),
        # Defaults to an empty JSON object at the database level.
        sa.Column("hints", sa.JSON(), server_default=sa.text("'{}'")),
        sa.Column("top_k", sa.Integer(), server_default="1"),
        _created_at(),
    )

    # Scored candidate miners produced for a request; removed with the request.
    # NOTE(review): miner_id carries no FK to miners here (unlike feedback) —
    # confirm whether that is intentional.
    op.create_table(
        "match_results",
        sa.Column("id", sa.String(length=36), primary_key=True),
        sa.Column(
            "request_id",
            sa.String(length=36),
            sa.ForeignKey("match_requests.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column("miner_id", sa.String(length=64), nullable=False),
        sa.Column("score", sa.Float(), nullable=False),
        sa.Column("explain", sa.Text()),
        sa.Column("eta_ms", sa.Integer()),
        sa.Column("price", sa.Float()),
        _created_at(),
    )
    op.create_index("ix_match_results_request_id", "match_results", ["request_id"])

    # Post-job outcome reports used to update miner trust/latency stats.
    op.create_table(
        "feedback",
        sa.Column("id", sa.String(length=36), primary_key=True),
        sa.Column("job_id", sa.String(length=64), nullable=False),
        sa.Column(
            "miner_id",
            sa.String(length=64),
            sa.ForeignKey("miners.miner_id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column("outcome", sa.String(length=32), nullable=False),
        sa.Column("latency_ms", sa.Integer()),
        sa.Column("fail_code", sa.String(length=64)),
        sa.Column("tokens_spent", sa.Float()),
        _created_at(),
    )
    op.create_index("ix_feedback_miner_id", "feedback", ["miner_id"])
    op.create_index("ix_feedback_job_id", "feedback", ["job_id"])
def downgrade() -> None:
    """Drop the initial pool-hub schema.

    Reverses :func:`upgrade` exactly: indexes are dropped before their
    tables, and tables are dropped children-first so no FK constraint
    blocks the drop.
    """
    # Secondary indexes on feedback must go before the table itself.
    for index_name in ("ix_feedback_job_id", "ix_feedback_miner_id"):
        op.drop_index(index_name, table_name="feedback")
    op.drop_table("feedback")

    op.drop_index("ix_match_results_request_id", table_name="match_results")

    # Reverse of creation order: FK children before their parents.
    for table_name in ("match_results", "match_requests", "miner_status", "miners"):
        op.drop_table(table_name)