fix(blockchain): fix database schema, migrations and add genesis script
This commit is contained in:
@@ -0,0 +1,52 @@
|
|||||||
|
"""add_chain_id
|
||||||
|
|
||||||
|
Revision ID: 50fb6691025c
|
||||||
|
Revises: fix_transaction_block_foreign_key
|
||||||
|
Create Date: 2026-03-03 17:48:48.141666
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
import sqlmodel
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = '50fb6691025c'
|
||||||
|
down_revision: Union[str, Sequence[str], None] = 'fix_transaction_block_foreign_key'
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Upgrade schema: introduce multi-chain support.

    Adds a non-nullable ``chain_id`` column (server default ``'ait-testnet'``
    so existing rows backfill) to account, block, receipt and transaction,
    and replaces the previously-global unique indexes with per-chain unique
    constraints. batch_alter_table is used so the DDL works on SQLite.
    """
    default_chain = sa.Column(
        'chain_id',
        sqlmodel.sql.sqltypes.AutoString(),
        nullable=False,
        server_default='ait-testnet',
    )

    with op.batch_alter_table('account', schema=None) as batch_op:
        batch_op.add_column(default_chain.copy())

    with op.batch_alter_table('block', schema=None) as batch_op:
        batch_op.add_column(default_chain.copy())
        # Height is no longer globally unique — only unique within a chain.
        batch_op.drop_index('ix_block_height')
        batch_op.create_index('ix_block_height', ['height'], unique=False)
        batch_op.create_index('ix_block_chain_id', ['chain_id'], unique=False)
        batch_op.create_unique_constraint('uix_block_chain_height', ['chain_id', 'height'])

    with op.batch_alter_table('receipt', schema=None) as batch_op:
        batch_op.add_column(default_chain.copy())
        batch_op.drop_index('ix_receipt_receipt_id')
        batch_op.create_index('ix_receipt_receipt_id', ['receipt_id'], unique=False)
        batch_op.create_index('ix_receipt_chain_id', ['chain_id'], unique=False)
        # NOTE(review): name breaks the 'uix_<table>_chain_<col>' pattern used by
        # the other tables (would be 'uix_receipt_chain_receipt') — confirm before renaming.
        batch_op.create_unique_constraint('uix_receipt_chain_id', ['chain_id', 'receipt_id'])
        # Foreign-key drop is handled implicitly by batch_alter_table's
        # copy-and-move strategy on SQLite; no explicit drop_constraint needed.

    with op.batch_alter_table('transaction', schema=None) as batch_op:
        batch_op.add_column(default_chain.copy())
        # tx_hash uniqueness is now scoped per chain.
        batch_op.drop_index('ix_transaction_tx_hash')
        batch_op.create_index('ix_transaction_tx_hash', ['tx_hash'], unique=False)
        batch_op.create_index('ix_transaction_chain_id', ['chain_id'], unique=False)
        batch_op.create_unique_constraint('uix_tx_chain_hash', ['chain_id', 'tx_hash'])
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Downgrade schema: undo the multi-chain migration.

    The original body was ``pass``, which made this migration irreversible
    and left ``alembic downgrade`` silently broken. This reverses upgrade()
    step by step, in reverse table order: drop the per-chain unique
    constraints and indexes, restore the single-chain unique indexes, and
    drop the ``chain_id`` columns.
    """
    with op.batch_alter_table('transaction', schema=None) as batch_op:
        batch_op.drop_constraint('uix_tx_chain_hash', type_='unique')
        batch_op.drop_index('ix_transaction_chain_id')
        batch_op.drop_index('ix_transaction_tx_hash')
        # Pre-migration, ix_transaction_tx_hash was UNIQUE (see the
        # fix_transaction_block_foreign_key migration's CREATE UNIQUE INDEX).
        batch_op.create_index('ix_transaction_tx_hash', ['tx_hash'], unique=True)
        batch_op.drop_column('chain_id')

    with op.batch_alter_table('receipt', schema=None) as batch_op:
        batch_op.drop_constraint('uix_receipt_chain_id', type_='unique')
        batch_op.drop_index('ix_receipt_chain_id')
        batch_op.drop_index('ix_receipt_receipt_id')
        # NOTE(review): assumes receipt_id was globally unique before this
        # migration (upgrade() replaced it with a per-chain constraint) — confirm.
        batch_op.create_index('ix_receipt_receipt_id', ['receipt_id'], unique=True)
        batch_op.drop_column('chain_id')

    with op.batch_alter_table('block', schema=None) as batch_op:
        batch_op.drop_constraint('uix_block_chain_height', type_='unique')
        batch_op.drop_index('ix_block_chain_id')
        batch_op.drop_index('ix_block_height')
        # NOTE(review): assumes height was globally unique pre-migration — confirm.
        batch_op.create_index('ix_block_height', ['height'], unique=True)
        batch_op.drop_column('chain_id')

    with op.batch_alter_table('account', schema=None) as batch_op:
        batch_op.drop_column('chain_id')
|
||||||
@@ -11,7 +11,7 @@ import sqlalchemy as sa
|
|||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
# revision identifiers, used by Alembic.
|
||||||
revision = 'fix_transaction_block_foreign_key'
|
revision = 'fix_transaction_block_foreign_key'
|
||||||
down_revision = None
|
down_revision = "80bc0020bde2"
|
||||||
branch_labels = None
|
branch_labels = None
|
||||||
depends_on = None
|
depends_on = None
|
||||||
|
|
||||||
@@ -22,7 +22,7 @@ def upgrade():
|
|||||||
|
|
||||||
# Create new transaction table with correct foreign key
|
# Create new transaction table with correct foreign key
|
||||||
op.execute("""
|
op.execute("""
|
||||||
CREATE TABLE transaction_new (
|
CREATE TABLE "transaction_new" (
|
||||||
id INTEGER NOT NULL PRIMARY KEY,
|
id INTEGER NOT NULL PRIMARY KEY,
|
||||||
tx_hash VARCHAR NOT NULL,
|
tx_hash VARCHAR NOT NULL,
|
||||||
block_height INTEGER,
|
block_height INTEGER,
|
||||||
@@ -36,18 +36,18 @@ def upgrade():
|
|||||||
|
|
||||||
# Copy data from old table
|
# Copy data from old table
|
||||||
op.execute("""
|
op.execute("""
|
||||||
INSERT INTO transaction_new (id, tx_hash, block_height, sender, recipient, payload, created_at)
|
INSERT INTO "transaction_new" (id, tx_hash, block_height, sender, recipient, payload, created_at)
|
||||||
SELECT id, tx_hash, block_height, sender, recipient, payload, created_at FROM transaction
|
SELECT id, tx_hash, block_height, sender, recipient, payload, created_at FROM "transaction"
|
||||||
""")
|
""")
|
||||||
|
|
||||||
# Drop old table and rename new one
|
# Drop old table and rename new one
|
||||||
op.execute("DROP TABLE transaction")
|
op.execute('DROP TABLE "transaction"')
|
||||||
op.execute("ALTER TABLE transaction_new RENAME TO transaction")
|
op.execute('ALTER TABLE "transaction_new" RENAME TO "transaction"')
|
||||||
|
|
||||||
# Recreate indexes
|
# Recreate indexes
|
||||||
op.execute("CREATE UNIQUE INDEX ix_transaction_tx_hash ON transaction (tx_hash)")
|
op.execute('CREATE UNIQUE INDEX ix_transaction_tx_hash ON "transaction" (tx_hash)')
|
||||||
op.execute("CREATE INDEX ix_transaction_block_height ON transaction (block_height)")
|
op.execute('CREATE INDEX ix_transaction_block_height ON "transaction" (block_height)')
|
||||||
op.execute("CREATE INDEX ix_transaction_created_at ON transaction (created_at)")
|
op.execute('CREATE INDEX ix_transaction_created_at ON "transaction" (created_at)')
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
def downgrade():
|
||||||
@@ -55,7 +55,7 @@ def downgrade():
|
|||||||
|
|
||||||
# Create new transaction table with old foreign key
|
# Create new transaction table with old foreign key
|
||||||
op.execute("""
|
op.execute("""
|
||||||
CREATE TABLE transaction_new (
|
CREATE TABLE "transaction_new" (
|
||||||
id INTEGER NOT NULL PRIMARY KEY,
|
id INTEGER NOT NULL PRIMARY KEY,
|
||||||
tx_hash VARCHAR NOT NULL,
|
tx_hash VARCHAR NOT NULL,
|
||||||
block_height INTEGER,
|
block_height INTEGER,
|
||||||
@@ -69,15 +69,15 @@ def downgrade():
|
|||||||
|
|
||||||
# Copy data from old table
|
# Copy data from old table
|
||||||
op.execute("""
|
op.execute("""
|
||||||
INSERT INTO transaction_new (id, tx_hash, block_height, sender, recipient, payload, created_at)
|
INSERT INTO "transaction_new" (id, tx_hash, block_height, sender, recipient, payload, created_at)
|
||||||
SELECT id, tx_hash, block_height, sender, recipient, payload, created_at FROM transaction
|
SELECT id, tx_hash, block_height, sender, recipient, payload, created_at FROM "transaction"
|
||||||
""")
|
""")
|
||||||
|
|
||||||
# Drop old table and rename new one
|
# Drop old table and rename new one
|
||||||
op.execute("DROP TABLE transaction")
|
op.execute('DROP TABLE "transaction"')
|
||||||
op.execute("ALTER TABLE transaction_new RENAME TO transaction")
|
op.execute('ALTER TABLE "transaction_new" RENAME TO "transaction"')
|
||||||
|
|
||||||
# Recreate indexes
|
# Recreate indexes
|
||||||
op.execute("CREATE UNIQUE INDEX ix_transaction_tx_hash ON transaction (tx_hash)")
|
op.execute('CREATE UNIQUE INDEX ix_transaction_tx_hash ON "transaction" (tx_hash)')
|
||||||
op.execute("CREATE INDEX ix_transaction_block_height ON transaction (block_height)")
|
op.execute('CREATE INDEX ix_transaction_block_height ON "transaction" (block_height)')
|
||||||
op.execute("CREATE INDEX ix_transaction_created_at ON transaction (created_at)")
|
op.execute('CREATE INDEX ix_transaction_created_at ON "transaction" (created_at)')
|
||||||
|
|||||||
@@ -3,11 +3,22 @@ from __future__ import annotations
|
|||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
|
|
||||||
from sqlmodel import Session, SQLModel, create_engine
|
from sqlmodel import Session, SQLModel, create_engine
|
||||||
|
from sqlalchemy import event
|
||||||
|
|
||||||
from .config import settings
|
from .config import settings
|
||||||
|
|
||||||
_engine = create_engine(f"sqlite:///{settings.db_path}", echo=False)
|
_engine = create_engine(f"sqlite:///{settings.db_path}", echo=False)
|
||||||
|
|
||||||
|
@event.listens_for(_engine, "connect")
|
||||||
|
def set_sqlite_pragma(dbapi_connection, connection_record):
    """Apply concurrency/performance PRAGMAs to each new SQLite connection.

    Registered on the engine's "connect" event so every pooled DBAPI
    connection gets: WAL journaling, relaxed-but-safe fsync, a ~64 MiB page
    cache, in-memory temp tables, a large mmap window, and a 5 s busy timeout.
    """
    pragmas = (
        "PRAGMA journal_mode=WAL",
        "PRAGMA synchronous=NORMAL",
        "PRAGMA cache_size=-64000",      # negative => size in KiB (~64 MiB)
        "PRAGMA temp_store=MEMORY",
        "PRAGMA mmap_size=30000000000",  # capped by SQLite's compile-time limit
        "PRAGMA busy_timeout=5000",      # milliseconds
    )
    cur = dbapi_connection.cursor()
    for statement in pragmas:
        cur.execute(statement)
    cur.close()
|
||||||
|
|
||||||
def init_db() -> None:
|
def init_db() -> None:
|
||||||
settings.db_path.parent.mkdir(parents=True, exist_ok=True)
|
settings.db_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
|||||||
47
dev/scripts/create_genesis_all.py
Executable file
47
dev/scripts/create_genesis_all.py
Executable file
@@ -0,0 +1,47 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
sys.path.insert(0, os.path.abspath('apps/blockchain-node/src'))
|
||||||
|
|
||||||
|
from sqlmodel import select
|
||||||
|
from aitbc_chain.database import session_scope, init_db
|
||||||
|
from aitbc_chain.models import Block
|
||||||
|
from datetime import datetime
|
||||||
|
import hashlib
|
||||||
|
|
||||||
|
def compute_block_hash(chain_id: str, height: int, parent_hash: str, timestamp: datetime) -> str:
    """Return a deterministic SHA-256 block hash, hex-encoded with a 0x prefix.

    The preimage is the plain string concatenation of chain id, height,
    parent hash and the timestamp's str() form, in that order.
    """
    preimage = f"{chain_id}{height}{parent_hash}{timestamp}"
    digest = hashlib.sha256(preimage.encode())
    return "0x" + digest.hexdigest()
|
||||||
|
|
||||||
|
def create_genesis(chain_id: str):
    """Create and persist the height-0 genesis block for *chain_id*.

    Idempotent: if any block already exists for the chain, report the tip
    and do nothing.
    """
    print(f"Creating genesis block for {chain_id}...")

    with session_scope() as db:
        # Highest block for this chain, if any — existence means genesis is done.
        stmt = (
            select(Block)
            .where(Block.chain_id == chain_id)
            .order_by(Block.height.desc())
            .limit(1)
        )
        tip = db.exec(stmt).first()
        if tip:
            print(f"Genesis block already exists for {chain_id}: #{tip.height}")
            return

        # NOTE(review): datetime.utcnow() is naive and deprecated in 3.12+;
        # switching to timezone-aware timestamps would change stored values — confirm first.
        now = datetime.utcnow()
        block_hash = compute_block_hash(chain_id, 0, "0x00", now)
        genesis = Block(
            chain_id=chain_id,
            height=0,
            hash=block_hash,
            parent_hash="0x00",
            proposer=f"{chain_id}-proposer",
            timestamp=now,
            tx_count=0,
            state_root=None,
        )
        db.add(genesis)
        db.commit()
        print(f"Genesis block created for {chain_id}: #{genesis.height}")
        print(f"Hash: {genesis.hash}")
        print(f"Proposer: {genesis.proposer}")
        print(f"Timestamp: {genesis.timestamp}")
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Ensure the schema exists, then seed a genesis block for each known chain.
    init_db()
    chains = ("ait-testnet", "ait-devnet")
    for chain_id in chains:
        create_genesis(chain_id)
|
||||||
@@ -4,7 +4,7 @@ echo "🚀 COMPREHENSIVE BASELINE TEST (Pre-Deployment)"
|
|||||||
echo "==============================================="
|
echo "==============================================="
|
||||||
|
|
||||||
sites=(
|
sites=(
|
||||||
"localhost|http://127.0.0.1:8000|http://127.0.0.1:8082"
|
"localhost|http://127.0.0.1:8000|http://127.0.0.1:9080"
|
||||||
"aitbc (Primary)|http://10.1.223.93:8000|http://10.1.223.93:8082"
|
"aitbc (Primary)|http://10.1.223.93:8000|http://10.1.223.93:8082"
|
||||||
"aitbc1 (Secondary)|http://10.1.223.40:8000|http://10.1.223.40:8082"
|
"aitbc1 (Secondary)|http://10.1.223.40:8000|http://10.1.223.40:8082"
|
||||||
)
|
)
|
||||||
@@ -33,7 +33,7 @@ for site in "${sites[@]}"; do
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
# 3. ZK ML Circuits (Phase 5 check)
|
# 3. ZK ML Circuits (Phase 5 check)
|
||||||
zk_circuits=$(curl -s --connect-timeout 2 "$api_url/ml-zk/circuits" || echo "FAILED")
|
zk_circuits=$(curl -s --connect-timeout 2 "$api_url/v1/ml-zk/circuits" || echo "FAILED")
|
||||||
if [[ "$zk_circuits" == *"FAILED"* ]] || [[ -z "$zk_circuits" ]] || [[ "$zk_circuits" == *"Not Found"* ]]; then
|
if [[ "$zk_circuits" == *"FAILED"* ]] || [[ -z "$zk_circuits" ]] || [[ "$zk_circuits" == *"Not Found"* ]]; then
|
||||||
echo "⚠️ ZK Circuits: Unavailable or Not Found"
|
echo "⚠️ ZK Circuits: Unavailable or Not Found"
|
||||||
else
|
else
|
||||||
|
|||||||
Reference in New Issue
Block a user