Merge central-config-and-blockchain-enhancements from gitea
This commit is contained in:
114
.env.example
114
.env.example
@@ -1,63 +1,59 @@
|
|||||||
# AITBC Environment Configuration
|
# AITBC Central Environment Example Template
|
||||||
# SECURITY NOTICE: Use service-specific environment files
|
# SECURITY NOTICE: Use a secrets manager for production. Do not commit real secrets.
|
||||||
#
|
# Run: python config/security/environment-audit.py --format text
|
||||||
# For development, copy from:
|
|
||||||
# config/environments/development/coordinator.env
|
|
||||||
# config/environments/development/wallet-daemon.env
|
|
||||||
#
|
|
||||||
# For production, use AWS Secrets Manager and Kubernetes secrets
|
|
||||||
# Templates available in config/environments/production/
|
|
||||||
|
|
||||||
# =============================================================================
|
# =========================
|
||||||
# BASIC CONFIGURATION ONLY
|
# Blockchain core (example values)
|
||||||
# =============================================================================
|
# =========================
|
||||||
# Application Environment
|
chain_id=ait-devnet
|
||||||
|
supported_chains=ait-devnet
|
||||||
|
rpc_bind_host=127.0.0.1
|
||||||
|
rpc_bind_port=8006
|
||||||
|
p2p_bind_host=127.0.0.2
|
||||||
|
p2p_bind_port=8005
|
||||||
|
proposer_id=ait-devnet-proposer
|
||||||
|
proposer_key=
|
||||||
|
keystore_path=./keystore
|
||||||
|
keystore_password_file=./keystore/.password
|
||||||
|
gossip_backend=memory
|
||||||
|
gossip_broadcast_url=
|
||||||
|
db_path=./data/chain.db
|
||||||
|
mint_per_unit=0
|
||||||
|
coordinator_ratio=0.05
|
||||||
|
block_time_seconds=2
|
||||||
|
enable_block_production=false
|
||||||
|
|
||||||
|
# =========================
|
||||||
|
# Coordinator API (example values)
|
||||||
|
# =========================
|
||||||
APP_ENV=development
|
APP_ENV=development
|
||||||
DEBUG=false
|
APP_HOST=127.0.0.1
|
||||||
LOG_LEVEL=INFO
|
APP_PORT=8011
|
||||||
|
DATABASE__URL=sqlite:////opt/aitbc/data/coordinator/coordinator.db
|
||||||
|
BLOCKCHAIN_RPC_URL=http://127.0.0.1:8006
|
||||||
|
ALLOW_ORIGINS=["http://localhost:8011","http://localhost:8000"]
|
||||||
|
JOB_TTL_SECONDS=900
|
||||||
|
HEARTBEAT_INTERVAL_SECONDS=10
|
||||||
|
HEARTBEAT_TIMEOUT_SECONDS=30
|
||||||
|
RATE_LIMIT_REQUESTS=60
|
||||||
|
RATE_LIMIT_WINDOW_SECONDS=60
|
||||||
|
CLIENT_API_KEYS=["client_dev_key"]
|
||||||
|
MINER_API_KEYS=["miner_dev_key"]
|
||||||
|
ADMIN_API_KEYS=["admin_dev_key"]
|
||||||
|
HMAC_SECRET=change_this_to_a_32_byte_random_secret
|
||||||
|
JWT_SECRET=change_this_to_another_32_byte_random_secret
|
||||||
|
|
||||||
# =============================================================================
|
# =========================
|
||||||
# SECURITY REQUIREMENTS
|
# Marketplace Web (example values)
|
||||||
# =============================================================================
|
# =========================
|
||||||
# IMPORTANT: Do NOT store actual secrets in this file
|
VITE_MARKETPLACE_DATA_MODE=mock
|
||||||
# Use AWS Secrets Manager for production
|
VITE_MARKETPLACE_API=/api
|
||||||
# Generate secure keys with: openssl rand -hex 32
|
VITE_MARKETPLACE_ENABLE_BIDS=false
|
||||||
|
VITE_MARKETPLACE_REQUIRE_AUTH=false
|
||||||
|
|
||||||
# =============================================================================
|
# =========================
|
||||||
# SERVICE CONFIGURATION
|
# Notes
|
||||||
# =============================================================================
|
# =========================
|
||||||
# Choose your service configuration:
|
# For production: copy this to .env and replace with real values/secrets
|
||||||
# 1. Copy service-specific .env file from config/environments/
|
# Move secrets to a secrets manager and reference via secretRef
|
||||||
# 2. Fill in actual values (NEVER commit secrets)
|
# Validate config: python config/security/environment-audit.py --format text
|
||||||
# 3. Run: python config/security/environment-audit.py
|
|
||||||
|
|
||||||
# =============================================================================
|
|
||||||
# DEVELOPMENT QUICK START
|
|
||||||
# =============================================================================
|
|
||||||
# For quick development setup:
|
|
||||||
# cp config/environments/development/coordinator.env .env
|
|
||||||
# cp config/environments/development/wallet-daemon.env .env.wallet
|
|
||||||
#
|
|
||||||
# Then edit the copied files with your values
|
|
||||||
|
|
||||||
# =============================================================================
|
|
||||||
# PRODUCTION DEPLOYMENT
|
|
||||||
# =============================================================================
|
|
||||||
# For production deployment:
|
|
||||||
# 1. Use AWS Secrets Manager for all sensitive values
|
|
||||||
# 2. Reference secrets as: secretRef:secret-name:key
|
|
||||||
# 3. Run security audit before deployment
|
|
||||||
# 4. Use templates in config/environments/production/
|
|
||||||
|
|
||||||
# =============================================================================
|
|
||||||
# SECURITY VALIDATION
|
|
||||||
# =============================================================================
|
|
||||||
# Validate your configuration:
|
|
||||||
# python config/security/environment-audit.py --format text
|
|
||||||
|
|
||||||
# =============================================================================
|
|
||||||
# FOR MORE INFORMATION
|
|
||||||
# =============================================================================
|
|
||||||
# See: config/security/secret-validation.yaml
|
|
||||||
# See: config/security/environment-audit.py
|
|
||||||
# See: config/environments/ directory
|
|
||||||
|
|||||||
@@ -2,6 +2,7 @@
|
|||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
import hashlib
|
import hashlib
|
||||||
import hmac
|
import hmac
|
||||||
import time
|
import time
|
||||||
@@ -9,6 +10,7 @@ from dataclasses import dataclass
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import Any, Dict, List, Optional, Tuple
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
|
|
||||||
|
import httpx
|
||||||
from sqlmodel import Session, select
|
from sqlmodel import Session, select
|
||||||
|
|
||||||
from .config import settings
|
from .config import settings
|
||||||
@@ -95,12 +97,92 @@ class ChainSync:
|
|||||||
max_reorg_depth: int = 10,
|
max_reorg_depth: int = 10,
|
||||||
validator: Optional[ProposerSignatureValidator] = None,
|
validator: Optional[ProposerSignatureValidator] = None,
|
||||||
validate_signatures: bool = True,
|
validate_signatures: bool = True,
|
||||||
|
batch_size: int = 50,
|
||||||
|
poll_interval: float = 0.5,
|
||||||
) -> None:
|
) -> None:
|
||||||
self._session_factory = session_factory
|
self._session_factory = session_factory
|
||||||
self._chain_id = chain_id or settings.chain_id
|
self._chain_id = chain_id or settings.chain_id
|
||||||
self._max_reorg_depth = max_reorg_depth
|
self._max_reorg_depth = max_reorg_depth
|
||||||
self._validator = validator or ProposerSignatureValidator()
|
self._validator = validator or ProposerSignatureValidator()
|
||||||
self._validate_signatures = validate_signatures
|
self._validate_signatures = validate_signatures
|
||||||
|
self._batch_size = batch_size
|
||||||
|
self._poll_interval = poll_interval
|
||||||
|
self._client = httpx.AsyncClient(timeout=10.0)
|
||||||
|
|
||||||
|
async def close(self) -> None:
    """Dispose of the shared httpx client; call once when sync shuts down."""
    await self._client.aclose()
|
||||||
|
|
||||||
|
async def fetch_blocks_range(self, start: int, end: int, source_url: str) -> List[Dict[str, Any]]:
    """Fetch blocks in [start, end] from a peer RPC; best-effort, [] on failure.

    Accepts either a bare JSON list or a ``{"blocks": [...]}`` envelope.
    """
    try:
        response = await self._client.get(
            f"{source_url}/rpc/blocks-range",
            params={"start": start, "end": end},
        )
        response.raise_for_status()
        payload = response.json()
        # Normalize the two response shapes the peers are known to emit.
        if isinstance(payload, list):
            return payload
        if isinstance(payload, dict) and "blocks" in payload:
            return payload["blocks"]
        logger.error("Unexpected blocks-range response", extra={"data": payload})
        return []
    except Exception as exc:
        # Best-effort fetch: the caller treats [] as "nothing available".
        logger.error("Failed to fetch blocks range", extra={"start": start, "end": end, "error": str(exc)})
        return []
|
||||||
|
|
||||||
|
async def bulk_import_from(self, source_url: str, import_url: Optional[str] = None) -> int:
    """Bulk import missing blocks from ``source_url`` to catch up quickly.

    Compares the local head height against the remote head, then fetches
    and imports blocks in batches of ``self._batch_size``, pausing
    ``self._poll_interval`` seconds between batches. Stops at the first
    block that fails to import so no height gaps are created.

    Args:
        source_url: RPC base URL of the peer to pull blocks from.
        import_url: Local RPC URL (defaults to the local node); currently
            informational only — imports go through ``self.import_block``.

    Returns:
        Number of blocks successfully imported.
    """
    if import_url is None:
        import_url = "http://127.0.0.1:8006"  # default local RPC

    # Local head height; -1 means the local chain is empty.
    with self._session_factory() as session:
        local_head = session.exec(
            select(Block).where(Block.chain_id == self._chain_id).order_by(Block.height.desc()).limit(1)
        ).first()
        local_height = local_head.height if local_head else -1

    # Remote head height; give up quietly if the peer is unreachable.
    try:
        resp = await self._client.get(f"{source_url}/rpc/head")
        resp.raise_for_status()
        remote_head = resp.json()
        remote_height = remote_head.get("height", -1)
    except Exception as e:
        logger.error("Failed to fetch remote head", extra={"source_url": source_url, "error": str(e)})
        return 0

    if remote_height <= local_height:
        logger.info("Already up to date", extra={"local_height": local_height, "remote_height": remote_height})
        return 0

    logger.info("Starting bulk import", extra={"local_height": local_height, "remote_height": remote_height, "batch_size": self._batch_size})

    imported = 0
    start_height = local_height + 1
    while start_height <= remote_height:
        end_height = min(start_height + self._batch_size - 1, remote_height)
        batch = await self.fetch_blocks_range(start_height, end_height, source_url)
        if not batch:
            logger.warning("No blocks returned for range", extra={"start": start_height, "end": end_height})
            break

        # Import blocks in order.
        for block_data in batch:
            result = self.import_block(block_data)
            if result.accepted:
                imported += 1
            else:
                logger.warning("Block import failed during bulk", extra={"height": block_data.get("height"), "reason": result.reason})
                # BUG FIX: the previous code only broke out of this inner
                # loop, so the outer loop then skipped the rest of the
                # failed batch and kept importing later blocks — creating
                # exactly the height gaps the comment claimed to avoid.
                # Abort the whole sync on the first failure instead.
                return imported

        start_height = end_height + 1
        if start_height <= remote_height:
            # Brief pause between batches to avoid overwhelming the DB.
            await asyncio.sleep(self._poll_interval)

    logger.info("Bulk import completed", extra={"imported": imported, "final_height": remote_height})
    return imported
|
||||||
|
|
||||||
def import_block(self, block_data: Dict[str, Any], transactions: Optional[List[Dict[str, Any]]] = None) -> ImportResult:
|
def import_block(self, block_data: Dict[str, Any], transactions: Optional[List[Dict[str, Any]]] = None) -> ImportResult:
|
||||||
"""Import a block from a remote peer.
|
"""Import a block from a remote peer.
|
||||||
|
|||||||
42
apps/blockchain-node/src/aitbc_chain/sync_cli.py
Normal file
42
apps/blockchain-node/src/aitbc_chain/sync_cli.py
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
#!/usr/bin/env python3
"""
Standalone bulk sync utility for fast catch-up.

Usage: python -m aitbc_chain.sync_cli --source http://10.1.223.40:8006 [--batch-size 100]
"""

import argparse
import asyncio
import sys
from pathlib import Path

# Add src (the PARENT of the aitbc_chain package) to the path for
# standalone execution.
# BUG FIX: the previous code inserted Path(__file__).parent — the package
# directory itself — which does NOT make the absolute `aitbc_chain.*`
# imports below resolvable when the script is run directly.
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))

from aitbc_chain.config import settings
from aitbc_chain.database import session_scope
from aitbc_chain.sync import ChainSync


async def main() -> None:
    """Parse CLI options, run one bulk import pass, and clean up."""
    parser = argparse.ArgumentParser(description="Bulk import blocks from a leader to catch up quickly")
    parser.add_argument("--source", default="http://10.1.223.40:8006", help="Source RPC URL")
    parser.add_argument("--import-url", default="http://127.0.0.1:8006", help="Local RPC URL for import")
    parser.add_argument("--batch-size", type=int, default=100, help="Blocks per batch")
    parser.add_argument("--poll-interval", type=float, default=0.2, help="Seconds between batches")
    args = parser.parse_args()

    sync = ChainSync(
        session_factory=session_scope,
        chain_id=settings.chain_id,
        batch_size=args.batch_size,
        poll_interval=args.poll_interval,
    )
    try:
        imported = await sync.bulk_import_from(args.source, import_url=args.import_url)
        print(f"[+] Bulk sync complete: imported {imported} blocks")
    finally:
        # Always release the HTTP client, even if the import raised.
        await sync.close()


if __name__ == "__main__":
    asyncio.run(main())
|
||||||
@@ -303,6 +303,29 @@ def template_info(ctx, template_name, output):
|
|||||||
error(f"Error getting template info: {str(e)}")
|
error(f"Error getting template info: {str(e)}")
|
||||||
raise click.Abort()
|
raise click.Abort()
|
||||||
|
|
||||||
|
@genesis.command(name="init-production")
@click.option('--chain-id', default='ait-mainnet', show_default=True, help='Chain ID to initialize')
@click.option('--genesis-file', default='data/genesis_prod.yaml', show_default=True, help='Path to genesis YAML (copy to /opt/aitbc/genesis_prod.yaml if needed)')
@click.option('--force', is_flag=True, help='Overwrite existing DB (removes file if present)')
@click.pass_context
def init_production(ctx, chain_id, genesis_file, force):
    """Initialize production chain DB using genesis allocations."""
    db_path = Path("/opt/aitbc/data") / chain_id / "chain.db"
    if db_path.exists():
        if not force:
            # BUG FIX: previously an existing DB was silently reused/clobbered;
            # the --force flag implies that without it we must refuse.
            error(f"Database already exists at {db_path}; use --force to overwrite")
            raise click.Abort()
        db_path.unlink()
    # NOTE(review): --genesis-file is accepted but never forwarded to the
    # init script below — confirm whether init_production_genesis.py should
    # receive it.
    python_bin = Path(__file__).resolve().parents[3] / 'apps' / 'blockchain-node' / '.venv' / 'bin' / 'python3'
    cmd = [
        str(python_bin),
        str(Path(__file__).resolve().parents[3] / 'scripts' / 'init_production_genesis.py'),
        '--chain-id', chain_id,
    ]
    try:
        # shell=False (list form): chain_id is user input, keep it un-interpolated.
        subprocess.run(cmd, check=True)
        success(f"Initialized production genesis for {chain_id} at {db_path}")
    except subprocess.CalledProcessError as e:
        error(f"Genesis init failed: {e}")
        raise click.Abort()
|
||||||
|
|
||||||
@genesis.command()
|
@genesis.command()
|
||||||
@click.argument('chain_id')
|
@click.argument('chain_id')
|
||||||
@click.option('--format', type=click.Choice(['json', 'yaml']), default='json', help='Export format')
|
@click.option('--format', type=click.Choice(['json', 'yaml']), default='json', help='Export format')
|
||||||
|
|||||||
58
cli/aitbc_cli/commands/sync.py
Normal file
58
cli/aitbc_cli/commands/sync.py
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
"""Sync management commands for AITBC."""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from ..utils import success, error, run_subprocess
|
||||||
|
|
||||||
|
|
||||||
|
@click.group()
|
||||||
|
def sync():
|
||||||
|
"""Blockchain synchronization utilities."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@sync.command()
|
||||||
|
@click.option('--source', default='http://10.1.223.40:8006', help='Source RPC URL (leader)')
|
||||||
|
@click.option('--import-url', default='http://127.0.0.1:8006', help='Local RPC URL for import')
|
||||||
|
@click.option('--batch-size', type=int, default=100, help='Blocks per batch')
|
||||||
|
@click.option('--poll-interval', type=float, default=0.2, help='Seconds between batches')
|
||||||
|
def bulk(source, import_url, batch_size, poll_interval):
|
||||||
|
"""Bulk import blocks from a leader to catch up quickly."""
|
||||||
|
try:
|
||||||
|
# Resolve paths
|
||||||
|
blockchain_dir = Path(__file__).resolve().parents[3] / 'apps' / 'blockchain-node'
|
||||||
|
src_dir = blockchain_dir / 'src'
|
||||||
|
venv_python = blockchain_dir / '.venv' / 'bin' / 'python3'
|
||||||
|
sync_cli = src_dir / 'aitbc_chain' / 'sync_cli.py'
|
||||||
|
|
||||||
|
if not sync_cli.exists():
|
||||||
|
error("sync_cli.py not found. Ensure bulk sync feature is deployed.")
|
||||||
|
raise click.Abort()
|
||||||
|
|
||||||
|
cmd = [
|
||||||
|
str(venv_python),
|
||||||
|
str(sync_cli),
|
||||||
|
'--source', source,
|
||||||
|
'--import-url', import_url,
|
||||||
|
'--batch-size', str(batch_size),
|
||||||
|
'--poll-interval', str(poll_interval),
|
||||||
|
]
|
||||||
|
|
||||||
|
# Prepare environment
|
||||||
|
env = {
|
||||||
|
'PYTHONPATH': str(src_dir),
|
||||||
|
}
|
||||||
|
|
||||||
|
success(f"Running bulk sync from {source} to {import_url} (batch size: {batch_size})")
|
||||||
|
result = run_subprocess(cmd, env=env, capture_output=False)
|
||||||
|
if result.returncode != 0:
|
||||||
|
error("Bulk sync failed. Check logs for details.")
|
||||||
|
raise click.Abort()
|
||||||
|
success("Bulk sync completed.")
|
||||||
|
except Exception as e:
|
||||||
|
error(f"Error during bulk sync: {e}")
|
||||||
|
raise click.Abort()
|
||||||
@@ -68,6 +68,7 @@ try:
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
enterprise_integration_group = None
|
enterprise_integration_group = None
|
||||||
|
|
||||||
|
from .commands.sync import sync
|
||||||
from .commands.explorer import explorer
|
from .commands.explorer import explorer
|
||||||
from .plugins import plugin, load_plugins
|
from .plugins import plugin, load_plugins
|
||||||
|
|
||||||
@@ -270,6 +271,7 @@ cli.add_command(advanced_analytics_group)
|
|||||||
cli.add_command(ai_surveillance_group)
|
cli.add_command(ai_surveillance_group)
|
||||||
if enterprise_integration_group is not None:
|
if enterprise_integration_group is not None:
|
||||||
cli.add_command(enterprise_integration_group)
|
cli.add_command(enterprise_integration_group)
|
||||||
|
cli.add_command(sync)
|
||||||
cli.add_command(explorer)
|
cli.add_command(explorer)
|
||||||
cli.add_command(plugin)
|
cli.add_command(plugin)
|
||||||
load_plugins(cli)
|
load_plugins(cli)
|
||||||
|
|||||||
Reference in New Issue
Block a user