resolve merge conflicts in .gitignore and .env.example
Some checks failed
AITBC CI/CD Pipeline / lint-and-test (3.11) (push) Has been cancelled
AITBC CI/CD Pipeline / lint-and-test (3.12) (push) Has been cancelled
AITBC CI/CD Pipeline / lint-and-test (3.13) (push) Has been cancelled
AITBC CI/CD Pipeline / test-cli (push) Has been cancelled
AITBC CI/CD Pipeline / test-services (push) Has been cancelled
AITBC CI/CD Pipeline / test-production-services (push) Has been cancelled
AITBC CI/CD Pipeline / security-scan (push) Has been cancelled
AITBC CI/CD Pipeline / build (push) Has been cancelled
AITBC CI/CD Pipeline / deploy-staging (push) Has been cancelled
AITBC CI/CD Pipeline / deploy-production (push) Has been cancelled
AITBC CI/CD Pipeline / performance-test (push) Has been cancelled
AITBC CI/CD Pipeline / docs (push) Has been cancelled
AITBC CI/CD Pipeline / release (push) Has been cancelled
AITBC CI/CD Pipeline / notify (push) Has been cancelled
AITBC CLI Level 1 Commands Test / test-cli-level1 (3.11) (push) Has been cancelled
AITBC CLI Level 1 Commands Test / test-cli-level1 (3.12) (push) Has been cancelled
AITBC CLI Level 1 Commands Test / test-cli-level1 (3.13) (push) Has been cancelled
AITBC CLI Level 1 Commands Test / test-summary (push) Has been cancelled
Security Scanning / Bandit Security Scan (apps/coordinator-api/src) (push) Has been cancelled
Security Scanning / Bandit Security Scan (cli/aitbc_cli) (push) Has been cancelled
Security Scanning / Bandit Security Scan (packages/py/aitbc-core/src) (push) Has been cancelled
Security Scanning / Bandit Security Scan (packages/py/aitbc-crypto/src) (push) Has been cancelled
Security Scanning / Bandit Security Scan (packages/py/aitbc-sdk/src) (push) Has been cancelled
Security Scanning / Bandit Security Scan (tests) (push) Has been cancelled
Security Scanning / CodeQL Security Analysis (javascript) (push) Has been cancelled
Security Scanning / CodeQL Security Analysis (python) (push) Has been cancelled
Security Scanning / Dependency Security Scan (push) Has been cancelled
Security Scanning / Container Security Scan (push) Has been cancelled
Security Scanning / OSSF Scorecard (push) Has been cancelled
Security Scanning / Security Summary Report (push) Has been cancelled
AITBC CI/CD Pipeline / lint-and-test (3.11) (pull_request) Has been cancelled
AITBC CI/CD Pipeline / lint-and-test (3.12) (pull_request) Has been cancelled
AITBC CI/CD Pipeline / lint-and-test (3.13) (pull_request) Has been cancelled
AITBC CLI Level 1 Commands Test / test-cli-level1 (3.11) (pull_request) Has been cancelled
AITBC CLI Level 1 Commands Test / test-cli-level1 (3.12) (pull_request) Has been cancelled
AITBC CLI Level 1 Commands Test / test-cli-level1 (3.13) (pull_request) Has been cancelled
Security Scanning / Bandit Security Scan (apps/coordinator-api/src) (pull_request) Has been cancelled
Security Scanning / Bandit Security Scan (cli/aitbc_cli) (pull_request) Has been cancelled
Security Scanning / Bandit Security Scan (packages/py/aitbc-core/src) (pull_request) Has been cancelled
Security Scanning / Bandit Security Scan (packages/py/aitbc-crypto/src) (pull_request) Has been cancelled
Security Scanning / Bandit Security Scan (packages/py/aitbc-sdk/src) (pull_request) Has been cancelled
Security Scanning / Bandit Security Scan (tests) (pull_request) Has been cancelled
Security Scanning / CodeQL Security Analysis (javascript) (pull_request) Has been cancelled
Security Scanning / CodeQL Security Analysis (python) (pull_request) Has been cancelled
Security Scanning / Dependency Security Scan (pull_request) Has been cancelled
Security Scanning / Container Security Scan (pull_request) Has been cancelled
Security Scanning / OSSF Scorecard (pull_request) Has been cancelled
AITBC CI/CD Pipeline / test-cli (pull_request) Has been cancelled
AITBC CI/CD Pipeline / test-services (pull_request) Has been cancelled
AITBC CI/CD Pipeline / test-production-services (pull_request) Has been cancelled
AITBC CI/CD Pipeline / security-scan (pull_request) Has been cancelled
AITBC CI/CD Pipeline / build (pull_request) Has been cancelled
AITBC CI/CD Pipeline / deploy-staging (pull_request) Has been cancelled
AITBC CI/CD Pipeline / deploy-production (pull_request) Has been cancelled
AITBC CI/CD Pipeline / performance-test (pull_request) Has been cancelled
AITBC CI/CD Pipeline / docs (pull_request) Has been cancelled
AITBC CI/CD Pipeline / release (pull_request) Has been cancelled
AITBC CI/CD Pipeline / notify (pull_request) Has been cancelled
AITBC CLI Level 1 Commands Test / test-summary (pull_request) Has been cancelled
Security Scanning / Security Summary Report (pull_request) Has been cancelled

This commit is contained in:
2026-03-23 09:25:51 +01:00
7 changed files with 241 additions and 11 deletions

View File

@@ -1,4 +1,4 @@
# AITBC Central Environment Example # AITBC Central Environment Example Template
# SECURITY NOTICE: Use a secrets manager for production. Do not commit real secrets. # SECURITY NOTICE: Use a secrets manager for production. Do not commit real secrets.
# Run: python config/security/environment-audit.py --format text # Run: python config/security/environment-audit.py --format text

43
.gitignore vendored
View File

@@ -103,6 +103,39 @@ npm-debug.log*
yarn-debug.log* yarn-debug.log*
yarn-error.log* yarn-error.log*
# ===================
# Backup Files (organized)
# ===================
backup/**/*.tmp
backup/**/*.temp
backup/**/.DS_Store
backup/updates/*.log
# Large backup files (exceed GitHub size limits)
backup/updates/*.tar.gz
backup/updates/*.zip
backup/updates/*.tar.bz2
# Application backup archives
backup/explorer_backup_*.tar.gz
backup/*_backup_*.tar.gz
backup/*_backup_*.zip
# Backup documentation and indexes
backup/BACKUP_INDEX.md
backup/*.md
backup/README.md
# ===================
# Temporary Files
# ===================
tmp/
temp/
*.tmp
*.temp
*.bak
*.backup
# =================== # ===================
# Windsurf IDE # Windsurf IDE
# =================== # ===================
@@ -123,13 +156,3 @@ home/miner/miner_wallet.json
test-results/ test-results/
**/test-results/ **/test-results/
# ===================
# Temporary Files
# ===================
tmp/
temp/
*.tmp
*.temp
*.bak
*.backup

View File

@@ -2,6 +2,7 @@
from __future__ import annotations from __future__ import annotations
import asyncio
import hashlib import hashlib
import hmac import hmac
import time import time
@@ -9,6 +10,7 @@ from dataclasses import dataclass
from datetime import datetime from datetime import datetime
from typing import Any, Dict, List, Optional, Tuple from typing import Any, Dict, List, Optional, Tuple
import httpx
from sqlmodel import Session, select from sqlmodel import Session, select
from .config import settings from .config import settings
@@ -95,12 +97,92 @@ class ChainSync:
max_reorg_depth: int = 10, max_reorg_depth: int = 10,
validator: Optional[ProposerSignatureValidator] = None, validator: Optional[ProposerSignatureValidator] = None,
validate_signatures: bool = True, validate_signatures: bool = True,
batch_size: int = 50,
poll_interval: float = 0.5,
) -> None: ) -> None:
self._session_factory = session_factory self._session_factory = session_factory
self._chain_id = chain_id or settings.chain_id self._chain_id = chain_id or settings.chain_id
self._max_reorg_depth = max_reorg_depth self._max_reorg_depth = max_reorg_depth
self._validator = validator or ProposerSignatureValidator() self._validator = validator or ProposerSignatureValidator()
self._validate_signatures = validate_signatures self._validate_signatures = validate_signatures
self._batch_size = batch_size
self._poll_interval = poll_interval
self._client = httpx.AsyncClient(timeout=10.0)
async def close(self) -> None:
    """Release the pooled HTTP connections held by the sync client."""
    await self._client.aclose()
async def fetch_blocks_range(self, start: int, end: int, source_url: str) -> List[Dict[str, Any]]:
    """Fetch blocks in the inclusive range [start, end] from a source RPC.

    Accepts either a bare JSON list or a ``{"blocks": [...]}`` envelope.
    Any transport, HTTP-status, or format error is logged and yields ``[]``.
    """
    query = {"start": start, "end": end}
    try:
        response = await self._client.get(f"{source_url}/rpc/blocks-range", params=query)
        response.raise_for_status()
        payload = response.json()
        # Tolerate both response shapes the leader may emit.
        if isinstance(payload, list):
            return payload
        if isinstance(payload, dict) and "blocks" in payload:
            return payload["blocks"]
        logger.error("Unexpected blocks-range response", extra={"data": payload})
        return []
    except Exception as e:
        logger.error("Failed to fetch blocks range", extra={"start": start, "end": end, "error": str(e)})
        return []
async def bulk_import_from(self, source_url: str, import_url: Optional[str] = None) -> int:
    """Bulk-import missing blocks from *source_url* to catch up quickly.

    Determines the local chain head, asks the source for its head, then pulls
    blocks in batches of ``self._batch_size`` and imports them in order,
    sleeping ``self._poll_interval`` seconds between batches.

    Args:
        source_url: Base URL of the leader RPC to pull blocks from.
        import_url: Kept for backward compatibility; currently unused
            (imports go through ``self.import_block`` directly).

    Returns:
        Number of blocks successfully imported (0 if already up to date or
        the remote head could not be fetched).
    """
    if import_url is None:
        import_url = "http://127.0.0.1:8006"  # default local RPC
    # Get local head
    with self._session_factory() as session:
        local_head = session.exec(
            select(Block).where(Block.chain_id == self._chain_id).order_by(Block.height.desc()).limit(1)
        ).first()
        local_height = local_head.height if local_head else -1
    # Get remote head
    try:
        resp = await self._client.get(f"{source_url}/rpc/head")
        resp.raise_for_status()
        remote_head = resp.json()
        remote_height = remote_head.get("height", -1)
    except Exception as e:
        logger.error("Failed to fetch remote head", extra={"source_url": source_url, "error": str(e)})
        return 0
    if remote_height <= local_height:
        logger.info("Already up to date", extra={"local_height": local_height, "remote_height": remote_height})
        return 0
    logger.info("Starting bulk import", extra={"local_height": local_height, "remote_height": remote_height, "batch_size": self._batch_size})
    imported = 0
    start_height = local_height + 1
    while start_height <= remote_height:
        end_height = min(start_height + self._batch_size - 1, remote_height)
        batch = await self.fetch_blocks_range(start_height, end_height, source_url)
        if not batch:
            logger.warning("No blocks returned for range", extra={"start": start_height, "end": end_height})
            break
        # Import blocks in order; a single failure must halt the whole import,
        # otherwise later blocks would be imported over a gap.
        halted = False
        for block_data in batch:
            result = self.import_block(block_data)
            if result.accepted:
                imported += 1
            else:
                logger.warning("Block import failed during bulk", extra={"height": block_data.get("height"), "reason": result.reason})
                halted = True
                break
        if halted:
            # BUG FIX: previously only the inner loop broke, after which
            # start_height jumped to end_height + 1 and the outer loop kept
            # going — silently skipping the unimported tail of this batch.
            break
        start_height = end_height + 1
        # Brief pause to avoid overwhelming the DB
        await asyncio.sleep(self._poll_interval)
    logger.info("Bulk import completed", extra={"imported": imported, "final_height": remote_height})
    return imported
def import_block(self, block_data: Dict[str, Any], transactions: Optional[List[Dict[str, Any]]] = None) -> ImportResult: def import_block(self, block_data: Dict[str, Any], transactions: Optional[List[Dict[str, Any]]] = None) -> ImportResult:
"""Import a block from a remote peer. """Import a block from a remote peer.

View File

@@ -0,0 +1,42 @@
#!/usr/bin/env python3
"""
Standalone bulk sync utility for fast catch-up.
Usage: python -m aitbc_chain.sync_cli --source http://10.1.223.40:8006 [--batch-size 100]
"""
import argparse
import asyncio
import sys
from pathlib import Path
# Add src to path for standalone execution
sys.path.insert(0, str(Path(__file__).parent))
from aitbc_chain.config import settings
from aitbc_chain.database import session_scope
from aitbc_chain.sync import ChainSync
async def main() -> None:
    """Parse command-line options and run one bulk-sync pass against a leader."""
    parser = argparse.ArgumentParser(description="Bulk import blocks from a leader to catch up quickly")
    parser.add_argument("--source", default="http://10.1.223.40:8006", help="Source RPC URL")
    parser.add_argument("--import-url", default="http://127.0.0.1:8006", help="Local RPC URL for import")
    parser.add_argument("--batch-size", type=int, default=100, help="Blocks per batch")
    parser.add_argument("--poll-interval", type=float, default=0.2, help="Seconds between batches")
    opts = parser.parse_args()

    syncer = ChainSync(
        session_factory=session_scope,
        chain_id=settings.chain_id,
        batch_size=opts.batch_size,
        poll_interval=opts.poll_interval,
    )
    try:
        count = await syncer.bulk_import_from(opts.source, import_url=opts.import_url)
        print(f"[+] Bulk sync complete: imported {count} blocks")
    finally:
        # Always release the HTTP client, even when the import fails.
        await syncer.close()


if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -348,6 +348,29 @@ def template_info(ctx, template_name, output):
error(f"Error getting template info: {str(e)}") error(f"Error getting template info: {str(e)}")
raise click.Abort() raise click.Abort()
@genesis.command(name="init-production")
@click.option('--chain-id', default='ait-mainnet', show_default=True, help='Chain ID to initialize')
@click.option('--genesis-file', default='data/genesis_prod.yaml', show_default=True, help='Path to genesis YAML (copy to /opt/aitbc/genesis_prod.yaml if needed)')
@click.option('--force', is_flag=True, help='Overwrite existing DB (removes file if present)')
@click.pass_context
def init_production(ctx, chain_id, genesis_file, force):
    """Initialize production chain DB using genesis allocations."""
    db_path = Path("/opt/aitbc/data") / chain_id / "chain.db"
    # Drop the existing database only when explicitly forced.
    if force and db_path.exists():
        db_path.unlink()
    repo_root = Path(__file__).resolve().parents[3]
    python_bin = repo_root / 'apps' / 'blockchain-node' / '.venv' / 'bin' / 'python3'
    init_script = repo_root / 'scripts' / 'init_production_genesis.py'
    # NOTE(review): --genesis-file is accepted but never forwarded to the init
    # script — confirm whether init_production_genesis.py locates it itself.
    cmd = [str(python_bin), str(init_script), '--chain-id', chain_id]
    try:
        subprocess.run(cmd, check=True)
    except subprocess.CalledProcessError as e:
        error(f"Genesis init failed: {e}")
        raise click.Abort()
    success(f"Initialized production genesis for {chain_id} at {db_path}")
@genesis.command() @genesis.command()
@click.argument('chain_id') @click.argument('chain_id')
@click.option('--format', type=click.Choice(['json', 'yaml']), default='json', help='Export format') @click.option('--format', type=click.Choice(['json', 'yaml']), default='json', help='Export format')

View File

@@ -0,0 +1,58 @@
"""Sync management commands for AITBC."""
import asyncio
import sys
from pathlib import Path
import click
from ..utils import success, error, run_subprocess
@click.group()
def sync():
    """Blockchain synchronization utilities."""
@sync.command()
@click.option('--source', default='http://10.1.223.40:8006', help='Source RPC URL (leader)')
@click.option('--import-url', default='http://127.0.0.1:8006', help='Local RPC URL for import')
@click.option('--batch-size', type=int, default=100, help='Blocks per batch')
@click.option('--poll-interval', type=float, default=0.2, help='Seconds between batches')
def bulk(source, import_url, batch_size, poll_interval):
    """Bulk import blocks from a leader to catch up quickly.

    Shells out to the blockchain node's sync_cli.py using the node's own
    virtualenv interpreter, so it runs against the node's dependencies.
    """
    try:
        # Resolve paths relative to this file's position in the repo layout.
        blockchain_dir = Path(__file__).resolve().parents[3] / 'apps' / 'blockchain-node'
        src_dir = blockchain_dir / 'src'
        venv_python = blockchain_dir / '.venv' / 'bin' / 'python3'
        sync_cli = src_dir / 'aitbc_chain' / 'sync_cli.py'
        if not sync_cli.exists():
            error("sync_cli.py not found. Ensure bulk sync feature is deployed.")
            raise click.Abort()
        cmd = [
            str(venv_python),
            str(sync_cli),
            '--source', source,
            '--import-url', import_url,
            '--batch-size', str(batch_size),
            '--poll-interval', str(poll_interval),
        ]
        # NOTE(review): this env dict may REPLACE the child's environment rather
        # than extend it, depending on run_subprocess — confirm it merges os.environ.
        env = {
            'PYTHONPATH': str(src_dir),
        }
        success(f"Running bulk sync from {source} to {import_url} (batch size: {batch_size})")
        result = run_subprocess(cmd, env=env, capture_output=False)
        if result.returncode != 0:
            error("Bulk sync failed. Check logs for details.")
            raise click.Abort()
        success("Bulk sync completed.")
    except click.Abort:
        # BUG FIX: click.Abort subclasses RuntimeError, so the generic handler
        # below used to catch our own aborts and print a second, misleading
        # "Error during bulk sync" message. Propagate aborts unchanged.
        raise
    except Exception as e:
        error(f"Error during bulk sync: {e}")
        raise click.Abort()

View File

@@ -69,6 +69,7 @@ try:
except ImportError: except ImportError:
enterprise_integration_group = None enterprise_integration_group = None
from .commands.sync import sync
from .commands.explorer import explorer from .commands.explorer import explorer
from .plugins import plugin, load_plugins from .plugins import plugin, load_plugins
@@ -272,6 +273,7 @@ cli.add_command(advanced_analytics_group)
cli.add_command(ai_surveillance_group) cli.add_command(ai_surveillance_group)
if enterprise_integration_group is not None: if enterprise_integration_group is not None:
cli.add_command(enterprise_integration_group) cli.add_command(enterprise_integration_group)
cli.add_command(sync)
cli.add_command(explorer) cli.add_command(explorer)
cli.add_command(plugin) cli.add_command(plugin)
load_plugins(cli) load_plugins(cli)