diff --git a/.env.example b/.env.example index 39de6a4a..7e042023 100644 --- a/.env.example +++ b/.env.example @@ -1,4 +1,4 @@ -# AITBC Central Environment Example +# AITBC Central Environment Example Template # SECURITY NOTICE: Use a secrets manager for production. Do not commit real secrets. # Run: python config/security/environment-audit.py --format text diff --git a/.gitignore b/.gitignore index f74f3bf8..28381300 100644 --- a/.gitignore +++ b/.gitignore @@ -103,6 +103,39 @@ npm-debug.log* yarn-debug.log* yarn-error.log* +# =================== +# Backup Files (organized) +# =================== +backup/**/*.tmp +backup/**/*.temp +backup/**/.DS_Store +backup/updates/*.log + +# Large backup files (exceed GitHub size limits) +backup/updates/*.tar.gz +backup/updates/*.zip +backup/updates/*.tar.bz2 + +# Application backup archives +backup/explorer_backup_*.tar.gz +backup/*_backup_*.tar.gz +backup/*_backup_*.zip + +# Backup documentation and indexes +backup/BACKUP_INDEX.md +backup/*.md +backup/README.md + +# =================== +# Temporary Files +# =================== +tmp/ +temp/ +*.tmp +*.temp +*.bak +*.backup + # =================== # Windsurf IDE # =================== @@ -123,13 +156,3 @@ home/miner/miner_wallet.json test-results/ **/test-results/ -# =================== -# Temporary Files -# =================== -tmp/ -temp/ -*.tmp -*.temp -*.bak -*.backup - diff --git a/apps/blockchain-node/src/aitbc_chain/sync.py b/apps/blockchain-node/src/aitbc_chain/sync.py index d9fe31e8..4aef1d8b 100755 --- a/apps/blockchain-node/src/aitbc_chain/sync.py +++ b/apps/blockchain-node/src/aitbc_chain/sync.py @@ -2,6 +2,7 @@ from __future__ import annotations +import asyncio import hashlib import hmac import time @@ -9,6 +10,7 @@ from dataclasses import dataclass from datetime import datetime from typing import Any, Dict, List, Optional, Tuple +import httpx from sqlmodel import Session, select from .config import settings @@ -95,12 +97,92 @@ class ChainSync: max_reorg_depth: int = 
10, validator: Optional[ProposerSignatureValidator] = None, validate_signatures: bool = True, + batch_size: int = 50, + poll_interval: float = 0.5, ) -> None: self._session_factory = session_factory self._chain_id = chain_id or settings.chain_id self._max_reorg_depth = max_reorg_depth self._validator = validator or ProposerSignatureValidator() self._validate_signatures = validate_signatures + self._batch_size = batch_size + self._poll_interval = poll_interval + self._client = httpx.AsyncClient(timeout=10.0) + + async def close(self) -> None: + """Close HTTP client.""" + await self._client.aclose() + + async def fetch_blocks_range(self, start: int, end: int, source_url: str) -> List[Dict[str, Any]]: + """Fetch a range of blocks from a source RPC.""" + try: + resp = await self._client.get(f"{source_url}/rpc/blocks-range", params={"start": start, "end": end}) + resp.raise_for_status() + data = resp.json() + if isinstance(data, list): + return data + elif isinstance(data, dict) and "blocks" in data: + return data["blocks"] + else: + logger.error("Unexpected blocks-range response", extra={"data": data}) + return [] + except Exception as e: + logger.error("Failed to fetch blocks range", extra={"start": start, "end": end, "error": str(e)}) + return [] + + async def bulk_import_from(self, source_url: str, import_url: Optional[str] = None) -> int: + """Bulk import missing blocks from source to catch up quickly.""" + if import_url is None: + import_url = "http://127.0.0.1:8006" # default local RPC + + # Get local head + with self._session_factory() as session: + local_head = session.exec( + select(Block).where(Block.chain_id == self._chain_id).order_by(Block.height.desc()).limit(1) + ).first() + local_height = local_head.height if local_head else -1 + + # Get remote head + try: + resp = await self._client.get(f"{source_url}/rpc/head") + resp.raise_for_status() + remote_head = resp.json() + remote_height = remote_head.get("height", -1) + except Exception as e: + 
logger.error("Failed to fetch remote head", extra={"source_url": source_url, "error": str(e)}) + return 0 + + if remote_height <= local_height: + logger.info("Already up to date", extra={"local_height": local_height, "remote_height": remote_height}) + return 0 + + logger.info("Starting bulk import", extra={"local_height": local_height, "remote_height": remote_height, "batch_size": self._batch_size}) + + imported = 0 + start_height = local_height + 1 + while start_height <= remote_height: + end_height = min(start_height + self._batch_size - 1, remote_height) + batch = await self.fetch_blocks_range(start_height, end_height, source_url) + if not batch: + logger.warning("No blocks returned for range", extra={"start": start_height, "end": end_height}) + break + + # Import blocks in order + for block_data in batch: + result = self.import_block(block_data) + if result.accepted: + imported += 1 + else: + logger.warning("Block import failed during bulk", extra={"height": block_data.get("height"), "reason": result.reason}) + # Abort the whole import on first failure: a bare break would let the outer loop advance past the failed height and create a gap + return imported + + start_height = end_height + 1 + # Brief pause to avoid overwhelming the DB + await asyncio.sleep(self._poll_interval) + + logger.info("Bulk import completed", extra={"imported": imported, "final_height": remote_height}) + return imported def import_block(self, block_data: Dict[str, Any], transactions: Optional[List[Dict[str, Any]]] = None) -> ImportResult: """Import a block from a remote peer. diff --git a/apps/blockchain-node/src/aitbc_chain/sync_cli.py b/apps/blockchain-node/src/aitbc_chain/sync_cli.py new file mode 100644 index 00000000..4ea5299f --- /dev/null +++ b/apps/blockchain-node/src/aitbc_chain/sync_cli.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python3 +""" +Standalone bulk sync utility for fast catch-up. 
+Usage: python -m aitbc_chain.sync_cli --source http://10.1.223.40:8006 [--batch-size 100] +""" + +import argparse +import asyncio +import sys +from pathlib import Path + +# Add src to path for standalone execution (this file lives in src/aitbc_chain/, so src is two levels up) +sys.path.insert(0, str(Path(__file__).parent.parent)) + +from aitbc_chain.config import settings +from aitbc_chain.database import session_scope +from aitbc_chain.sync import ChainSync + + +async def main() -> None: + parser = argparse.ArgumentParser(description="Bulk import blocks from a leader to catch up quickly") + parser.add_argument("--source", default="http://10.1.223.40:8006", help="Source RPC URL") + parser.add_argument("--import-url", default="http://127.0.0.1:8006", help="Local RPC URL for import") + parser.add_argument("--batch-size", type=int, default=100, help="Blocks per batch") + parser.add_argument("--poll-interval", type=float, default=0.2, help="Seconds between batches") + args = parser.parse_args() + + sync = ChainSync( + session_factory=session_scope, + chain_id=settings.chain_id, + batch_size=args.batch_size, + poll_interval=args.poll_interval, + ) + try: + imported = await sync.bulk_import_from(args.source, import_url=args.import_url) + print(f"[+] Bulk sync complete: imported {imported} blocks") + finally: + await sync.close() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/cli/aitbc_cli/commands/genesis.py b/cli/aitbc_cli/commands/genesis.py index d373040e..9cabfabd 100755 --- a/cli/aitbc_cli/commands/genesis.py +++ b/cli/aitbc_cli/commands/genesis.py @@ -348,6 +348,29 @@ def template_info(ctx, template_name, output): error(f"Error getting template info: {str(e)}") raise click.Abort() +@genesis.command(name="init-production") +@click.option('--chain-id', default='ait-mainnet', show_default=True, help='Chain ID to initialize') +@click.option('--genesis-file', default='data/genesis_prod.yaml', show_default=True, help='Path to genesis YAML (copy to /opt/aitbc/genesis_prod.yaml if needed)') +@click.option('--force', 
is_flag=True, help='Overwrite existing DB (removes file if present)') +@click.pass_context +def init_production(ctx, chain_id, genesis_file, force): + """Initialize production chain DB using genesis allocations.""" + db_path = Path("/opt/aitbc/data") / chain_id / "chain.db" + if db_path.exists() and force: + db_path.unlink() + python_bin = Path(__file__).resolve().parents[3] / 'apps' / 'blockchain-node' / '.venv' / 'bin' / 'python3' + cmd = [ + str(python_bin), + str(Path(__file__).resolve().parents[3] / 'scripts' / 'init_production_genesis.py'), + '--chain-id', chain_id, + ] + try: + subprocess.run(cmd, check=True) + success(f"Initialized production genesis for {chain_id} at {db_path}") + except subprocess.CalledProcessError as e: + error(f"Genesis init failed: {e}") + raise click.Abort() + @genesis.command() @click.argument('chain_id') @click.option('--format', type=click.Choice(['json', 'yaml']), default='json', help='Export format') diff --git a/cli/aitbc_cli/commands/sync.py b/cli/aitbc_cli/commands/sync.py new file mode 100644 index 00000000..c88e5022 --- /dev/null +++ b/cli/aitbc_cli/commands/sync.py @@ -0,0 +1,62 @@ +"""Sync management commands for AITBC.""" + +import asyncio +import os +import sys +from pathlib import Path + +import click + +from ..utils import success, error, run_subprocess + + +@click.group() +def sync(): + """Blockchain synchronization utilities.""" + pass + + +@sync.command() +@click.option('--source', default='http://10.1.223.40:8006', help='Source RPC URL (leader)') +@click.option('--import-url', default='http://127.0.0.1:8006', help='Local RPC URL for import') +@click.option('--batch-size', type=int, default=100, help='Blocks per batch') +@click.option('--poll-interval', type=float, default=0.2, help='Seconds between batches') +def bulk(source, import_url, batch_size, poll_interval): + """Bulk import blocks from a leader to catch up quickly.""" + try: + # Resolve paths + blockchain_dir = Path(__file__).resolve().parents[3] / 'apps' / 'blockchain-node' + src_dir = blockchain_dir / 'src' + venv_python = blockchain_dir / '.venv' / 'bin' / 'python3' + sync_cli = src_dir / 'aitbc_chain' / 'sync_cli.py' + + if not sync_cli.exists(): + error("sync_cli.py not found. Ensure bulk sync feature is deployed.") + raise click.Abort() + + cmd = [ + str(venv_python), + str(sync_cli), + '--source', source, + '--import-url', import_url, + '--batch-size', str(batch_size), + '--poll-interval', str(poll_interval), + ] + + # Inherit the parent environment (PATH, HOME, ...) rather than replacing it; add src to PYTHONPATH + env = { + **os.environ, + 'PYTHONPATH': str(src_dir), + } + + success(f"Running bulk sync from {source} to {import_url} (batch size: {batch_size})") + result = run_subprocess(cmd, env=env, capture_output=False) + if result.returncode != 0: + error("Bulk sync failed. Check logs for details.") + raise click.Abort() + success("Bulk sync completed.") + except click.Abort: + raise + except Exception as e: + error(f"Error during bulk sync: {e}") + raise click.Abort() diff --git a/cli/aitbc_cli/main.py b/cli/aitbc_cli/main.py index 4ecd81df..1e3be39c 100755 --- a/cli/aitbc_cli/main.py +++ b/cli/aitbc_cli/main.py @@ -69,6 +69,7 @@ try: except ImportError: enterprise_integration_group = None +from .commands.sync import sync from .commands.explorer import explorer from .plugins import plugin, load_plugins @@ -272,6 +273,7 @@ cli.add_command(advanced_analytics_group) cli.add_command(ai_surveillance_group) if enterprise_integration_group is not None: cli.add_command(enterprise_integration_group) +cli.add_command(sync) cli.add_command(explorer) cli.add_command(plugin) load_plugins(cli)