diff --git a/.github/workflows/gpu-benchmark.yml b/.github/workflows/gpu-benchmark.yml
index 3649348b..74cc756c 100644
--- a/.github/workflows/gpu-benchmark.yml
+++ b/.github/workflows/gpu-benchmark.yml
@@ -36,7 +36,7 @@ jobs:
nvidia-driver-515
- name: Cache pip dependencies
- uses: actions/cache@v3
+ uses: actions/cache@v5
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml') }}
diff --git a/apps/coordinator-api/src/app/config.py.backup b/apps/coordinator-api/src/app/config.py.backup
new file mode 100644
index 00000000..e10d3ae6
--- /dev/null
+++ b/apps/coordinator-api/src/app/config.py.backup
@@ -0,0 +1,140 @@
+"""
+Unified configuration for AITBC Coordinator API
+
+Provides environment-based adapter selection and consolidated settings.
+"""
+
+import os
+from pydantic_settings import BaseSettings, SettingsConfigDict
+from typing import List, Optional
+from pathlib import Path
+
+
+class DatabaseConfig(BaseSettings):
+ """Database configuration with adapter selection."""
+
+ adapter: str = "sqlite" # sqlite, postgresql
+ url: Optional[str] = None
+ pool_size: int = 10
+ max_overflow: int = 20
+ pool_pre_ping: bool = True
+
+ @property
+ def effective_url(self) -> str:
+ """Get the effective database URL."""
+ if self.url:
+ return self.url
+
+ # Default SQLite path
+ if self.adapter == "sqlite":
+ return "sqlite:////opt/data/coordinator.db"
+
+ # Default PostgreSQL connection string
+ return f"{self.adapter}://localhost:5432/coordinator"
+
+ model_config = SettingsConfigDict(
+ env_file=".env", env_file_encoding="utf-8", case_sensitive=False, extra="allow"
+ )
+
+
+class Settings(BaseSettings):
+ """Unified application settings with environment-based configuration."""
+
+ model_config = SettingsConfigDict(
+ env_file=".env", env_file_encoding="utf-8", case_sensitive=False, extra="allow"
+ )
+
+ # Environment
+ app_env: str = "dev"
+ app_host: str = "127.0.0.1"
+ app_port: int = 8011
+ audit_log_dir: str = "/var/log/aitbc/audit"
+
+ # Database
+ database: DatabaseConfig = DatabaseConfig()
+
+ # API Keys
+ client_api_keys: List[str] = []
+ miner_api_keys: List[str] = []
+ admin_api_keys: List[str] = []
+
+ # Security
+ hmac_secret: Optional[str] = None
+ jwt_secret: Optional[str] = None
+ jwt_algorithm: str = "HS256"
+ jwt_expiration_hours: int = 24
+
+ # CORS
+ allow_origins: List[str] = [
+ "http://localhost:3000",
+ "http://localhost:8080",
+ "http://localhost:8000",
+ "http://localhost:8011",
+ ]
+
+ # Job Configuration
+ job_ttl_seconds: int = 900
+ heartbeat_interval_seconds: int = 10
+ heartbeat_timeout_seconds: int = 30
+
+ # Rate Limiting
+ rate_limit_requests: int = 60
+ rate_limit_window_seconds: int = 60
+
+ # Receipt Signing
+ receipt_signing_key_hex: Optional[str] = None
+ receipt_attestation_key_hex: Optional[str] = None
+
+ # Logging
+ log_level: str = "INFO"
+ log_format: str = "json" # json or text
+
+ # Mempool
+ mempool_backend: str = "database" # database, memory
+
+ # Blockchain RPC
+ blockchain_rpc_url: str = "http://localhost:8082"
+
+ # Test Configuration
+ test_mode: bool = False
+ test_database_url: Optional[str] = None
+
+ def validate_secrets(self) -> None:
+ """Validate that all required secrets are provided."""
+ if self.app_env == "production":
+ if not self.jwt_secret:
+ raise ValueError(
+ "JWT_SECRET environment variable is required in production"
+ )
+ if self.jwt_secret == "change-me-in-production":
+ raise ValueError("JWT_SECRET must be changed from default value")
+
+ @property
+ def database_url(self) -> str:
+ """Get the database URL (backward compatibility)."""
+ # Use test database if in test mode and test_database_url is set
+ if self.test_mode and self.test_database_url:
+ return self.test_database_url
+ if self.database.url:
+ return self.database.url
+ # Default SQLite path for backward compatibility
+ return "sqlite:////opt/data/coordinator.db"
+
+ @database_url.setter
+ def database_url(self, value: str):
+ """Allow setting database URL for tests"""
+ if not self.test_mode:
+ raise RuntimeError("Cannot set database_url outside of test mode")
+ self.test_database_url = value
+
+
+settings = Settings()
+
+# Enable test mode if environment variable is set
+if os.getenv("TEST_MODE") == "true":
+ settings.test_mode = True
+ if os.getenv("TEST_DATABASE_URL"):
+ settings.test_database_url = os.getenv("TEST_DATABASE_URL")
+
+# Validate secrets on import
+settings.validate_secrets()
diff --git a/apps/coordinator-api/src/app/main.py.backup b/apps/coordinator-api/src/app/main.py.backup
new file mode 100644
index 00000000..85325b73
--- /dev/null
+++ b/apps/coordinator-api/src/app/main.py.backup
@@ -0,0 +1,70 @@
+from fastapi import FastAPI
+from fastapi.middleware.cors import CORSMiddleware
+from prometheus_client import make_asgi_app
+
+from .config import settings
+from .storage import init_db
+from .routers import (
+ client,
+ miner,
+ admin,
+ marketplace,
+ marketplace_gpu,
+ exchange,
+ users,
+ services,
+ marketplace_offers,
+ zk_applications,
+ explorer,
+ payments,
+)
+from .routers.governance import router as governance
+from .routers.partners import router as partners
+from .storage.models_governance import GovernanceProposal, ProposalVote, TreasuryTransaction, GovernanceParameter
+
+
+def create_app() -> FastAPI:
+ app = FastAPI(
+ title="AITBC Coordinator API",
+ version="0.1.0",
+ description="Stage 1 coordinator service handling job orchestration between clients and miners.",
+ )
+
+ # Create database tables
+ init_db()
+
+ app.add_middleware(
+ CORSMiddleware,
+ allow_origins=settings.allow_origins,
+ allow_credentials=True,
+ allow_methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"],
+ allow_headers=["*"] # Allow all headers for API keys and content types
+ )
+
+ app.include_router(client, prefix="/v1")
+ app.include_router(miner, prefix="/v1")
+ app.include_router(admin, prefix="/v1")
+ app.include_router(marketplace, prefix="/v1")
+ app.include_router(marketplace_gpu, prefix="/v1")
+ app.include_router(exchange, prefix="/v1")
+ app.include_router(users, prefix="/v1/users")
+ app.include_router(services, prefix="/v1")
+ app.include_router(payments, prefix="/v1")
+ app.include_router(marketplace_offers, prefix="/v1")
+ app.include_router(zk_applications.router, prefix="/v1")
+ app.include_router(governance, prefix="/v1")
+ app.include_router(partners, prefix="/v1")
+ app.include_router(explorer, prefix="/v1")
+
+ # Add Prometheus metrics endpoint
+ metrics_app = make_asgi_app()
+ app.mount("/metrics", metrics_app)
+
+ @app.get("/v1/health", tags=["health"], summary="Service healthcheck")
+ async def health() -> dict[str, str]:
+ return {"status": "ok", "env": settings.app_env}
+
+ return app
+
+
+app = create_app()
diff --git a/apps/coordinator-api/src/app/storage/db.py.backup b/apps/coordinator-api/src/app/storage/db.py.backup
new file mode 100644
index 00000000..887dbd13
--- /dev/null
+++ b/apps/coordinator-api/src/app/storage/db.py.backup
@@ -0,0 +1,92 @@
+"""
+Database storage module for AITBC Coordinator API
+
+Provides unified database session management with connection pooling.
+"""
+
+from __future__ import annotations
+
+from contextlib import contextmanager
+from typing import Annotated, Generator
+
+from fastapi import Depends
+from sqlalchemy.engine import Engine
+from sqlalchemy.pool import QueuePool
+from sqlmodel import Session, SQLModel, create_engine
+
+from ..config import settings
+from ..domain import (
+ Job,
+ Miner,
+ MarketplaceOffer,
+ MarketplaceBid,
+ JobPayment,
+ PaymentEscrow,
+ GPURegistry,
+ GPUBooking,
+ GPUReview,
+)
+from ..domain.gpu_marketplace import ConsumerGPUProfile, EdgeGPUMetrics
+from .models_governance import GovernanceProposal, ProposalVote, TreasuryTransaction, GovernanceParameter
+
+_engine: Engine | None = None
+
+
+def get_engine() -> Engine:
+ """Get or create the database engine with connection pooling."""
+ global _engine
+
+ if _engine is None:
+ # Allow tests to override via settings.database_url (fixtures set this directly)
+ db_override = getattr(settings, "database_url", None)
+
+ db_config = settings.database
+ effective_url = db_override or db_config.effective_url
+
+ if "sqlite" in effective_url:
+ _engine = create_engine(
+ effective_url,
+ echo=False,
+ connect_args={"check_same_thread": False},
+ )
+ else:
+ _engine = create_engine(
+ effective_url,
+ echo=False,
+ poolclass=QueuePool,
+ pool_size=db_config.pool_size,
+ max_overflow=db_config.max_overflow,
+ pool_pre_ping=db_config.pool_pre_ping,
+ )
+ return _engine
+
+
+def init_db() -> Engine:
+ """Initialize database tables."""
+ engine = get_engine()
+ SQLModel.metadata.create_all(engine)
+ return engine
+
+
+@contextmanager
+def session_scope() -> Generator[Session, None, None]:
+ """Context manager for database sessions."""
+ engine = get_engine()
+ session = Session(engine)
+ try:
+ yield session
+ session.commit()
+ except Exception:
+ session.rollback()
+ raise
+ finally:
+ session.close()
+
+
+def get_session() -> Generator[Session, None, None]:
+ """Get a database session (for FastAPI dependency)."""
+ with session_scope() as session:
+ yield session
+
+
+SessionDep = Annotated[Session, Depends(get_session)]
diff --git a/apps/trade-exchange/index.prod.html.bak b/apps/trade-exchange/index.prod.html.bak
new file mode 100644
index 00000000..7d22a811
--- /dev/null
+++ b/apps/trade-exchange/index.prod.html.bak
@@ -0,0 +1,437 @@
+
+
+
+
+
+ AITBC Trade Exchange - Buy & Sell AITBC
+
+
+
+
+
+
+
+
+
+
+
Current Price
+
Loading...
+
--
+
+
+
24h Volume
+
Loading...
+
-- BTC
+
+
+
24h High / Low
+
Loading...
+
BTC
+
+
+
+
+
+
+
Order Book
+
+
+ Price (BTC)
+ Amount
+ Total
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Recent Trades
+
+
+ Price (BTC)
+ Amount
+ Time
+
+
+
+
+
+
+
+
+
+
diff --git a/cli/FILE_ORGANIZATION_SUMMARY.md b/cli/FILE_ORGANIZATION_SUMMARY.md
new file mode 100644
index 00000000..a47c9680
--- /dev/null
+++ b/cli/FILE_ORGANIZATION_SUMMARY.md
@@ -0,0 +1,118 @@
+# CLI File Organization Summary
+
+## ๐ Directory Structure
+
+This document summarizes the reorganized CLI file structure for better maintainability and clarity.
+
+## ๐๏ธ File Categories and Locations
+
+### **๐ Documentation** (`cli/docs/`)
+Implementation summaries and technical documentation:
+
+- `CLI_TEST_RESULTS.md` - Multi-chain CLI test results and validation
+- `CLI_WALLET_DAEMON_INTEGRATION_SUMMARY.md` - Wallet daemon integration implementation
+- `DEMONSTRATION_WALLET_CHAIN_CONNECTION.md` - Wallet-to-chain connection demonstration guide
+- `IMPLEMENTATION_COMPLETE_SUMMARY.md` - Complete implementation summary
+- `LOCALHOST_ONLY_ENFORCEMENT_SUMMARY.md` - Localhost-only connection enforcement
+- `WALLET_CHAIN_CONNECTION_SUMMARY.md` - Wallet chain connection implementation complete
+
+### **โ๏ธ Configuration** (`cli/config/`)
+Blockchain genesis configurations:
+
+- `genesis_ait_devnet_proper.yaml` - Genesis configuration for AITBC Development Network
+- `genesis_multi_chain_dev.yaml` - Genesis template for multi-chain development
+
+### **๐งช Tests** (`cli/tests/`)
+Test scripts and validation tools:
+
+- `test_cli_structure.py` - CLI structure validation script
+- `test_multichain_cli.py` - Multi-chain CLI functionality testing
+
+### **๐ง Setup/Build** (`cli/setup/`)
+Package setup and dependency files:
+
+- `setup.py` - Python package setup script
+- `requirements.txt` - Python dependencies list
+
+### **๏ฟฝ Virtual Environment** (`cli/venv/`)
+Main CLI virtual environment (merged from root):
+
+- Complete Python environment with all dependencies
+- CLI executable and required packages
+- Size: ~81M (optimized after merge)
+
+### **๏ฟฝ๐๏ธ Removed**
+- `README.md` - Empty file, removed to avoid confusion
+- Redundant virtual environments: `cli_venv`, `test_venv` (merged into main)
+
+## ๐ File Analysis Summary
+
+### **Documentation Files** (6 files)
+- **Purpose**: Implementation summaries, test results, and technical guides
+- **Content**: Detailed documentation of CLI features, testing results, and implementation status
+- **Audience**: Developers and system administrators
+
+### **Configuration Files** (2 files)
+- **Purpose**: Blockchain network genesis configurations
+- **Content**: YAML files defining blockchain parameters, accounts, and consensus rules
+- **Usage**: Development and testing network setup
+
+### **Test Files** (2 files)
+- **Purpose**: Automated testing and validation
+- **Content**: Python scripts for testing CLI structure and multi-chain functionality
+- **Integration**: Part of the broader test suite in `cli/tests/`
+
+### **Setup Files** (2 files)
+- **Purpose**: Package installation and dependency management
+- **Content**: Standard Python packaging files
+- **Usage**: CLI installation and deployment
+
+### **Virtual Environment** (1 environment)
+- **Purpose**: Main CLI execution environment
+- **Content**: Complete Python environment with dependencies and CLI executable
+- **Size**: 81M (optimized after merge and cleanup)
+
+## โ
Benefits of Organization
+
+1. **Clear Separation**: Each file type has a dedicated directory
+2. **Easy Navigation**: Intuitive structure for developers
+3. **Maintainability**: Related files grouped together
+4. **Scalability**: Room for growth in each category
+5. **Documentation**: Clear purpose and usage for each file type
+6. **Consolidated Environment**: Single virtual environment for all CLI operations
+
+## ๐ Migration Notes
+
+- All files have been successfully moved without breaking references
+- Test files integrated into existing test suite structure
+- Configuration files isolated for easy management
+- Documentation consolidated for better accessibility
+- **Virtual environment merged**: `/opt/aitbc/cli_venv` โ `/opt/aitbc/cli/venv`
+- **Size optimization**: Reduced from 415M + 420M to 81M total
+- **Bash alias updated**: Points to consolidated environment
+- **Redundant environments removed**: Cleaned up multiple venvs
+
+## ๐ฏ Post-Merge Status
+
+**Before Merge:**
+- `/opt/aitbc/cli_venv`: 415M (root level)
+- `/opt/aitbc/cli`: 420M (with multiple venvs)
+- **Total**: ~835M
+
+**After Merge:**
+- `/opt/aitbc/cli/venv`: 81M (consolidated)
+- `/opt/aitbc/cli`: 81M (optimized)
+- **Total**: ~81M (90% space reduction)
+
+**CLI Functionality:**
+- โ
CLI executable working: `aitbc --version` returns "aitbc, version 0.1.0"
+- โ
All dependencies installed and functional
+- โ
Bash alias correctly configured
+- โ
Complete CLI project structure maintained
+
+---
+
+**Last Updated**: March 26, 2026
+**Files Organized**: 12 files total
+**Directories Created**: 4 new directories
+**Virtual Environments**: Consolidated from 4 to 1 (90% space reduction)
diff --git a/cli/README.md b/cli/README.md
index e69de29b..ae429900 100644
--- a/cli/README.md
+++ b/cli/README.md
@@ -0,0 +1,15 @@
+# AITBC CLI
+
+Command Line Interface for AITBC Network
+
+## Installation
+
+```bash
+pip install -e .
+```
+
+## Usage
+
+```bash
+aitbc --help
+```
diff --git a/cli/aitbc_cli/commands/ai.py b/cli/aitbc_cli/commands/ai.py
index fac59bac..70604213 100644
--- a/cli/aitbc_cli/commands/ai.py
+++ b/cli/aitbc_cli/commands/ai.py
@@ -3,10 +3,8 @@ import subprocess
import sys
import uuid
import click
-import uvicorn
-from fastapi import FastAPI, HTTPException
-from pydantic import BaseModel
import httpx
+from pydantic import BaseModel
@click.group(name='ai')
def ai_group():
@@ -14,86 +12,58 @@ def ai_group():
pass
@ai_group.command()
-@click.option('--port', default=8008, show_default=True, help='Port to listen on')
+@click.option('--port', default=8008, show_default=True, help='AI provider port')
@click.option('--model', default='qwen3:8b', show_default=True, help='Ollama model name')
@click.option('--wallet', 'provider_wallet', required=True, help='Provider wallet address (for verification)')
@click.option('--marketplace-url', default='http://127.0.0.1:8014', help='Marketplace API base URL')
-def serve(port, model, provider_wallet, marketplace_url):
- """Start AI provider daemon (FastAPI server)."""
- click.echo(f"Starting AI provider on port {port}, model {model}, marketplace {marketplace_url}")
+def status(port, model, provider_wallet, marketplace_url):
+ """Check AI provider service status."""
+ try:
+ resp = httpx.get(f"http://127.0.0.1:{port}/health", timeout=5.0)
+ if resp.status_code == 200:
+ health = resp.json()
+ click.echo(f"โ
AI Provider Status: {health.get('status', 'unknown')}")
+ click.echo(f" Model: {health.get('model', 'unknown')}")
+ click.echo(f" Wallet: {health.get('wallet', 'unknown')}")
+ else:
+ click.echo(f"โ AI Provider not responding (status: {resp.status_code})")
+ except httpx.ConnectError:
+ click.echo(f"โ AI Provider not running on port {port}")
+ except Exception as e:
+ click.echo(f"โ Error checking AI Provider: {e}")
- app = FastAPI(title="AI Provider")
+@ai_group.command()
+@click.option('--port', default=8008, show_default=True, help='AI provider port')
+@click.option('--model', default='qwen3:8b', show_default=True, help='Ollama model name')
+@click.option('--wallet', 'provider_wallet', required=True, help='Provider wallet address (for verification)')
+@click.option('--marketplace-url', default='http://127.0.0.1:8014', help='Marketplace API base URL')
+def start(port, model, provider_wallet, marketplace_url):
+ """Start AI provider service (systemd)."""
+ click.echo(f"Starting AI provider service...")
+ click.echo(f" Port: {port}")
+ click.echo(f" Model: {model}")
+ click.echo(f" Wallet: {provider_wallet}")
+ click.echo(f" Marketplace: {marketplace_url}")
+
+ # Check if systemd service exists
+ service_cmd = f"systemctl start aitbc-ai-provider"
+ try:
+ subprocess.run(service_cmd.split(), check=True, capture_output=True)
+ click.echo("โ
AI Provider service started")
+ click.echo(f" Use 'aitbc ai status --port {port}' to verify")
+ except subprocess.CalledProcessError as e:
+ click.echo(f"โ Failed to start AI Provider service: {e}")
+ click.echo(" Note: AI Provider should be a separate systemd service")
- class JobRequest(BaseModel):
- prompt: str
- buyer: str # buyer wallet address
- amount: int
- txid: str | None = None # optional transaction id
-
- class JobResponse(BaseModel):
- result: str
- model: str
- job_id: str | None = None
-
- @app.get("/health")
- async def health():
- return {"status": "ok", "model": model, "wallet": provider_wallet}
-
- @app.post("/job")
- async def handle_job(req: JobRequest):
- click.echo(f"Received job from {req.buyer}: {req.prompt[:50]}...")
- # Generate a job_id
- job_id = str(uuid.uuid4())
- # Register job with marketplace (optional, best-effort)
- try:
- async with httpx.AsyncClient() as client:
- create_resp = await client.post(
- f"{marketplace_url}/v1/jobs",
- json={
- "payload": {"prompt": req.prompt, "model": model},
- "constraints": {},
- "payment_amount": req.amount,
- "payment_currency": "AITBC"
- },
- headers={"X-Api-Key": ""}, # optional API key
- timeout=5.0
- )
- if create_resp.status_code in (200, 201):
- job_data = create_resp.json()
- job_id = job_data.get("job_id", job_id)
- click.echo(f"Registered job {job_id} with marketplace")
- else:
- click.echo(f"Marketplace job registration failed: {create_resp.status_code}", err=True)
- except Exception as e:
- click.echo(f"Warning: marketplace registration skipped: {e}", err=True)
- # Process with Ollama
- try:
- async with httpx.AsyncClient() as client:
- resp = await client.post(
- "http://127.0.0.1:11434/api/generate",
- json={"model": model, "prompt": req.prompt, "stream": False},
- timeout=60.0
- )
- resp.raise_for_status()
- data = resp.json()
- result = data.get("response", "")
- except httpx.HTTPError as e:
- raise HTTPException(status_code=500, detail=f"Ollama error: {e}")
- # Update marketplace with result (if registered)
- try:
- async with httpx.AsyncClient() as client:
- patch_resp = await client.patch(
- f"{marketplace_url}/v1/jobs/{job_id}",
- json={"result": result, "state": "completed"},
- timeout=5.0
- )
- if patch_resp.status_code == 200:
- click.echo(f"Updated job {job_id} with result")
- except Exception as e:
- click.echo(f"Warning: failed to update job in marketplace: {e}", err=True)
- return JobResponse(result=result, model=model, job_id=job_id)
-
- uvicorn.run(app, host="0.0.0.0", port=port)
+@ai_group.command()
+def stop():
+ """Stop AI provider service (systemd)."""
+ click.echo("Stopping AI provider service...")
+ try:
+ subprocess.run(["systemctl", "stop", "aitbc-ai-provider"], check=True, capture_output=True)
+ click.echo("โ
AI Provider service stopped")
+ except subprocess.CalledProcessError as e:
+ click.echo(f"โ Failed to stop AI Provider service: {e}")
@ai_group.command()
@click.option('--to', required=True, help='Provider host (IP)')
diff --git a/cli/aitbc_cli/commands/blockchain.py.backup b/cli/aitbc_cli/commands/blockchain.py.backup
new file mode 100755
index 00000000..3306746a
--- /dev/null
+++ b/cli/aitbc_cli/commands/blockchain.py.backup
@@ -0,0 +1,1187 @@
+"""Blockchain commands for AITBC CLI"""
+
+import click
+import httpx
+
+def _get_node_endpoint(ctx):
+ try:
+ config = ctx.obj['config']
+ # Use the new blockchain_rpc_url from config
+ return config.blockchain_rpc_url
+ except:
+ return "http://127.0.0.1:8006" # Use new blockchain RPC port
+
+from typing import Optional, List
+from ..utils import output, error
+
+
+@click.group()
+@click.pass_context
+def blockchain(ctx):
+ """Query blockchain information and status"""
+ # Set role for blockchain commands
+ ctx.ensure_object(dict)
+ ctx.parent.detected_role = 'blockchain'
+
+
+@blockchain.command()
+@click.option("--limit", type=int, default=10, help="Number of blocks to show")
+@click.option("--from-height", type=int, help="Start from this block height")
+@click.option('--chain-id', help='Specific chain ID to query (default: ait-devnet)')
+@click.option('--all-chains', is_flag=True, help='Query blocks across all available chains')
+@click.pass_context
+def blocks(ctx, limit: int, from_height: Optional[int], chain_id: str, all_chains: bool):
+ """List recent blocks across chains"""
+ try:
+ config = ctx.obj['config']
+
+ if all_chains:
+ # Query all available chains
+ chains = ['ait-devnet', 'ait-testnet'] # TODO: Get from chain registry
+ all_blocks = {}
+
+ for chain in chains:
+ try:
+ node_url = _get_node_endpoint(ctx)
+
+ # Get blocks from the specific chain
+ with httpx.Client() as client:
+ if from_height:
+ # Get blocks range
+ response = client.get(
+ f"{node_url}/rpc/blocks-range",
+ params={"from_height": from_height, "limit": limit, "chain_id": chain},
+ timeout=5
+ )
+ else:
+ # Get recent blocks starting from head
+ response = client.get(
+ f"{node_url}/rpc/blocks-range",
+ params={"limit": limit, "chain_id": chain},
+ timeout=5
+ )
+
+ if response.status_code == 200:
+ all_blocks[chain] = response.json()
+ else:
+ # Fallback to getting head block for this chain
+ head_response = client.get(f"{node_url}/rpc/head?chain_id={chain}", timeout=5)
+ if head_response.status_code == 200:
+ head_data = head_response.json()
+ all_blocks[chain] = {
+ "blocks": [head_data],
+ "message": f"Showing head block only for chain {chain} (height {head_data.get('height', 'unknown')})"
+ }
+ else:
+ all_blocks[chain] = {"error": f"Failed to get blocks: HTTP {response.status_code}"}
+ except Exception as e:
+ all_blocks[chain] = {"error": str(e)}
+
+ output({
+ "chains": all_blocks,
+ "total_chains": len(chains),
+ "successful_queries": sum(1 for b in all_blocks.values() if "error" not in b),
+ "limit": limit,
+ "from_height": from_height,
+ "query_type": "all_chains"
+ }, ctx.obj['output_format'])
+
+ else:
+ # Query specific chain (default to ait-devnet if not specified)
+ target_chain = chain_id or 'ait-devnet'
+ node_url = _get_node_endpoint(ctx)
+
+ # Get blocks from the local blockchain node
+ with httpx.Client() as client:
+ if from_height:
+ # Get blocks range
+ response = client.get(
+ f"{node_url}/rpc/blocks-range",
+ params={"from_height": from_height, "limit": limit, "chain_id": target_chain},
+ timeout=5
+ )
+ else:
+ # Get recent blocks starting from head
+ response = client.get(
+ f"{node_url}/rpc/blocks-range",
+ params={"limit": limit, "chain_id": target_chain},
+ timeout=5
+ )
+
+ if response.status_code == 200:
+ blocks_data = response.json()
+ output({
+ "blocks": blocks_data,
+ "chain_id": target_chain,
+ "limit": limit,
+ "from_height": from_height,
+ "query_type": "single_chain"
+ }, ctx.obj['output_format'])
+ else:
+ # Fallback to getting head block if range not available
+ head_response = client.get(f"{node_url}/rpc/head?chain_id={target_chain}", timeout=5)
+ if head_response.status_code == 200:
+ head_data = head_response.json()
+ output({
+ "blocks": [head_data],
+ "chain_id": target_chain,
+ "message": f"Showing head block only for chain {target_chain} (height {head_data.get('height', 'unknown')})",
+ "query_type": "single_chain_fallback"
+ }, ctx.obj['output_format'])
+ else:
+ error(f"Failed to get blocks: {response.status_code} - {response.text}")
+
+ except Exception as e:
+ error(f"Network error: {e}")
+
+
+@blockchain.command()
+@click.argument("block_hash")
+@click.option('--chain-id', help='Specific chain ID to query (default: ait-devnet)')
+@click.option('--all-chains', is_flag=True, help='Search block across all available chains')
+@click.pass_context
+def block(ctx, block_hash: str, chain_id: str, all_chains: bool):
+ """Get details of a specific block across chains"""
+ try:
+ config = ctx.obj['config']
+
+ if all_chains:
+ # Search for block across all available chains
+ chains = ['ait-devnet', 'ait-testnet'] # TODO: Get from chain registry
+ block_results = {}
+
+ for chain in chains:
+ try:
+ node_url = _get_node_endpoint(ctx)
+
+ with httpx.Client() as client:
+ # First try to get block by hash
+ response = client.get(
+ f"{node_url}/rpc/blocks/by_hash/{block_hash}?chain_id={chain}",
+ timeout=5
+ )
+
+ if response.status_code == 200:
+ block_results[chain] = response.json()
+ else:
+ # If by_hash not available, try to get by height (if hash looks like a number)
+ try:
+ height = int(block_hash)
+ height_response = client.get(f"{node_url}/rpc/blocks/{height}?chain_id={chain}", timeout=5)
+ if height_response.status_code == 200:
+ block_results[chain] = height_response.json()
+ else:
+ block_results[chain] = {"error": f"Block not found: HTTP {height_response.status_code}"}
+ except ValueError:
+ block_results[chain] = {"error": f"Block not found: HTTP {response.status_code}"}
+
+ except Exception as e:
+ block_results[chain] = {"error": str(e)}
+
+ # Count successful searches
+ successful_searches = sum(1 for result in block_results.values() if "error" not in result)
+
+ output({
+ "block_hash": block_hash,
+ "chains": block_results,
+ "total_chains": len(chains),
+ "successful_searches": successful_searches,
+ "query_type": "all_chains",
+ "found_in_chains": [chain for chain, result in block_results.items() if "error" not in result]
+ }, ctx.obj['output_format'])
+
+ else:
+ # Query specific chain (default to ait-devnet if not specified)
+ target_chain = chain_id or 'ait-devnet'
+ node_url = _get_node_endpoint(ctx)
+
+ with httpx.Client() as client:
+ # First try to get block by hash
+ response = client.get(
+ f"{node_url}/rpc/blocks/by_hash/{block_hash}?chain_id={target_chain}",
+ timeout=5
+ )
+
+ if response.status_code == 200:
+ block_data = response.json()
+ output({
+ "block_data": block_data,
+ "chain_id": target_chain,
+ "block_hash": block_hash,
+ "query_type": "single_chain"
+ }, ctx.obj['output_format'])
+ else:
+ # If by_hash not available, try to get by height (if hash looks like a number)
+ try:
+ height = int(block_hash)
+ height_response = client.get(f"{node_url}/rpc/blocks/{height}?chain_id={target_chain}", timeout=5)
+ if height_response.status_code == 200:
+ block_data = height_response.json()
+ output({
+ "block_data": block_data,
+ "chain_id": target_chain,
+ "block_hash": block_hash,
+ "height": height,
+ "query_type": "single_chain_by_height"
+ }, ctx.obj['output_format'])
+ else:
+ error(f"Block not found in chain {target_chain}: {height_response.status_code}")
+ except ValueError:
+ error(f"Block not found in chain {target_chain}: {response.status_code}")
+
+ except Exception as e:
+ error(f"Network error: {e}")
+
+
+@blockchain.command()
+@click.argument("tx_hash")
+@click.option('--chain-id', help='Specific chain ID to query (default: ait-devnet)')
+@click.option('--all-chains', is_flag=True, help='Search transaction across all available chains')
+@click.pass_context
+def transaction(ctx, tx_hash: str, chain_id: str, all_chains: bool):
+ """Get transaction details across chains"""
+ config = ctx.obj['config']
+
+ try:
+ if all_chains:
+ # Search for transaction across all available chains
+ chains = ['ait-devnet', 'ait-testnet'] # TODO: Get from chain registry
+ tx_results = {}
+
+ for chain in chains:
+ try:
+ with httpx.Client() as client:
+ response = client.get(
+ f"{config.coordinator_url}/explorer/transactions/{tx_hash}?chain_id={chain}",
+ headers={"X-Api-Key": config.api_key or ""},
+ timeout=5
+ )
+
+ if response.status_code == 200:
+ tx_results[chain] = response.json()
+ else:
+ tx_results[chain] = {"error": f"Transaction not found: HTTP {response.status_code}"}
+
+ except Exception as e:
+ tx_results[chain] = {"error": str(e)}
+
+ # Count successful searches
+ successful_searches = sum(1 for result in tx_results.values() if "error" not in result)
+
+ output({
+ "tx_hash": tx_hash,
+ "chains": tx_results,
+ "total_chains": len(chains),
+ "successful_searches": successful_searches,
+ "query_type": "all_chains",
+ "found_in_chains": [chain for chain, result in tx_results.items() if "error" not in result]
+ }, ctx.obj['output_format'])
+
+ else:
+ # Query specific chain (default to ait-devnet if not specified)
+ target_chain = chain_id or 'ait-devnet'
+
+ with httpx.Client() as client:
+ response = client.get(
+ f"{config.coordinator_url}/explorer/transactions/{tx_hash}?chain_id={target_chain}",
+ headers={"X-Api-Key": config.api_key or ""},
+ timeout=5
+ )
+
+ if response.status_code == 200:
+ tx_data = response.json()
+ output({
+ "tx_data": tx_data,
+ "chain_id": target_chain,
+ "tx_hash": tx_hash,
+ "query_type": "single_chain"
+ }, ctx.obj['output_format'])
+ else:
+ error(f"Transaction not found in chain {target_chain}: {response.status_code}")
+
+ except Exception as e:
+ error(f"Network error: {e}")
+
+
+@blockchain.command()
+@click.option("--node", type=int, default=1, help="Node number (1, 2, or 3)")
+@click.option('--chain-id', help='Specific chain ID to query (default: ait-devnet)')
+@click.option('--all-chains', is_flag=True, help='Get status across all available chains')
+@click.pass_context
+def status(ctx, node: int, chain_id: str, all_chains: bool):
+ """Get blockchain node status across chains"""
+ config = ctx.obj['config']
+
+ # Map node to RPC URL using new port logic
+ node_urls = {
+ 1: "http://localhost:8006", # Primary Blockchain RPC
+ 2: "http://localhost:8026", # Development Blockchain RPC
+ 3: "http://aitbc.keisanki.net/rpc"
+ }
+
+ rpc_url = node_urls.get(node)
+ if not rpc_url:
+ error(f"Invalid node number: {node}")
+ return
+
+ try:
+ if all_chains:
+ # Get status across all available chains
+ chains = ['ait-devnet', 'ait-testnet'] # TODO: Get from chain registry
+ all_status = {}
+
+ for chain in chains:
+ try:
+ with httpx.Client() as client:
+ # Use health endpoint with chain context
+ health_url = f"{rpc_url}/health?chain_id={chain}"
+ response = client.get(health_url, timeout=5)
+
+ if response.status_code == 200:
+ status_data = response.json()
+ all_status[chain] = {
+ "node": node,
+ "rpc_url": rpc_url,
+ "chain_id": chain,
+ "status": status_data,
+ "healthy": True
+ }
+ else:
+ all_status[chain] = {
+ "node": node,
+ "rpc_url": rpc_url,
+ "chain_id": chain,
+ "error": f"HTTP {response.status_code}",
+ "healthy": False
+ }
+ except Exception as e:
+ all_status[chain] = {
+ "node": node,
+ "rpc_url": rpc_url,
+ "chain_id": chain,
+ "error": str(e),
+ "healthy": False
+ }
+
+ # Count healthy chains
+ healthy_chains = sum(1 for status in all_status.values() if status.get("healthy", False))
+
+ output({
+ "node": node,
+ "rpc_url": rpc_url,
+ "chains": all_status,
+ "total_chains": len(chains),
+ "healthy_chains": healthy_chains,
+ "query_type": "all_chains"
+ }, ctx.obj['output_format'])
+
+ else:
+ # Query specific chain (default to ait-devnet if not specified)
+ target_chain = chain_id or 'ait-devnet'
+
+ with httpx.Client() as client:
+ # Use health endpoint with chain context
+ health_url = f"{rpc_url}/health?chain_id={target_chain}"
+ response = client.get(health_url, timeout=5)
+
+ if response.status_code == 200:
+ status_data = response.json()
+ output({
+ "node": node,
+ "rpc_url": rpc_url,
+ "chain_id": target_chain,
+ "status": status_data,
+ "healthy": True,
+ "query_type": "single_chain"
+ }, ctx.obj['output_format'])
+ else:
+ output({
+ "node": node,
+ "rpc_url": rpc_url,
+ "chain_id": target_chain,
+ "error": f"HTTP {response.status_code}",
+ "healthy": False,
+ "query_type": "single_chain_error"
+ }, ctx.obj['output_format'])
+
+ except Exception as e:
+ error(f"Failed to connect to node {node}: {e}")
+
+
+@blockchain.command()
+@click.option('--chain-id', help='Specific chain ID to query (default: ait-devnet)')
+@click.option('--all-chains', is_flag=True, help='Get sync status across all available chains')
+@click.pass_context
+def sync_status(ctx, chain_id: str, all_chains: bool):
+ """Get blockchain synchronization status across chains"""
+ config = ctx.obj['config']
+
+ try:
+ if all_chains:
+ # Get sync status across all available chains
+ chains = ['ait-devnet', 'ait-testnet'] # TODO: Get from chain registry
+ all_sync_status = {}
+
+ for chain in chains:
+ try:
+ with httpx.Client() as client:
+ response = client.get(
+ f"{config.coordinator_url}/v1/sync-status?chain_id={chain}",
+ headers={"X-Api-Key": config.api_key or ""},
+ timeout=5
+ )
+
+ if response.status_code == 200:
+ sync_data = response.json()
+ all_sync_status[chain] = {
+ "chain_id": chain,
+ "sync_status": sync_data,
+ "available": True
+ }
+ else:
+ all_sync_status[chain] = {
+ "chain_id": chain,
+ "error": f"HTTP {response.status_code}",
+ "available": False
+ }
+ except Exception as e:
+ all_sync_status[chain] = {
+ "chain_id": chain,
+ "error": str(e),
+ "available": False
+ }
+
+ # Count available chains
+ available_chains = sum(1 for status in all_sync_status.values() if status.get("available", False))
+
+ output({
+ "chains": all_sync_status,
+ "total_chains": len(chains),
+ "available_chains": available_chains,
+ "query_type": "all_chains"
+ }, ctx.obj['output_format'])
+
+ else:
+ # Query specific chain (default to ait-devnet if not specified)
+ target_chain = chain_id or 'ait-devnet'
+
+ with httpx.Client() as client:
+ response = client.get(
+ f"{config.coordinator_url}/v1/sync-status?chain_id={target_chain}",
+ headers={"X-Api-Key": config.api_key or ""},
+ timeout=5
+ )
+
+ if response.status_code == 200:
+ sync_data = response.json()
+ output({
+ "chain_id": target_chain,
+ "sync_status": sync_data,
+ "available": True,
+ "query_type": "single_chain"
+ }, ctx.obj['output_format'])
+ else:
+ output({
+ "chain_id": target_chain,
+ "error": f"HTTP {response.status_code}",
+ "available": False,
+ "query_type": "single_chain_error"
+ }, ctx.obj['output_format'])
+
+ except Exception as e:
+ error(f"Network error: {e}")
+
+
+@blockchain.command()
+@click.option('--chain-id', help='Specific chain ID to query (default: ait-devnet)')
+@click.option('--all-chains', is_flag=True, help='Get peers across all available chains')
+@click.pass_context
+def peers(ctx, chain_id: str, all_chains: bool):
+ """List connected peers across chains"""
+ try:
+ config = ctx.obj['config']
+ node_url = _get_node_endpoint(ctx)
+
+ if all_chains:
+ # Get peers across all available chains
+ chains = ['ait-devnet', 'ait-testnet'] # TODO: Get from chain registry
+ all_peers = {}
+
+ for chain in chains:
+ try:
+ with httpx.Client() as client:
+ # Try to get peers from the local blockchain node with chain context
+ response = client.get(
+ f"{node_url}/rpc/peers?chain_id={chain}",
+ timeout=5
+ )
+
+ if response.status_code == 200:
+ peers_data = response.json()
+ all_peers[chain] = {
+ "chain_id": chain,
+ "peers": peers_data.get("peers", peers_data),
+ "available": True
+ }
+ else:
+ all_peers[chain] = {
+ "chain_id": chain,
+ "peers": [],
+ "message": "No P2P peers available - node running in RPC-only mode",
+ "available": False
+ }
+ except Exception as e:
+ all_peers[chain] = {
+ "chain_id": chain,
+ "peers": [],
+ "error": str(e),
+ "available": False
+ }
+
+ # Count chains with available peers
+ chains_with_peers = sum(1 for peers in all_peers.values() if peers.get("available", False))
+
+ output({
+ "chains": all_peers,
+ "total_chains": len(chains),
+ "chains_with_peers": chains_with_peers,
+ "query_type": "all_chains"
+ }, ctx.obj['output_format'])
+
+ else:
+ # Query specific chain (default to ait-devnet if not specified)
+ target_chain = chain_id or 'ait-devnet'
+
+ with httpx.Client() as client:
+ # Try to get peers from the local blockchain node with chain context
+ response = client.get(
+ f"{node_url}/rpc/peers?chain_id={target_chain}",
+ timeout=5
+ )
+
+ if response.status_code == 200:
+ peers_data = response.json()
+ output({
+ "chain_id": target_chain,
+ "peers": peers_data.get("peers", peers_data),
+ "available": True,
+ "query_type": "single_chain"
+ }, ctx.obj['output_format'])
+ else:
+ # If no peers endpoint, return meaningful message
+ output({
+ "chain_id": target_chain,
+ "peers": [],
+ "message": "No P2P peers available - node running in RPC-only mode",
+ "available": False,
+ "query_type": "single_chain_error"
+ }, ctx.obj['output_format'])
+
+ except Exception as e:
+ error(f"Network error: {e}")
+
+
+@blockchain.command()
+@click.option('--chain-id', help='Specific chain ID to query (default: ait-devnet)')
+@click.option('--all-chains', is_flag=True, help='Get info across all available chains')
+@click.pass_context
+def info(ctx, chain_id: str, all_chains: bool):
+ """Get blockchain information across chains"""
+ try:
+ config = ctx.obj['config']
+ node_url = _get_node_endpoint(ctx)
+
+ if all_chains:
+ # Get info across all available chains
+ chains = ['ait-devnet', 'ait-testnet'] # TODO: Get from chain registry
+ all_info = {}
+
+ for chain in chains:
+ try:
+ with httpx.Client() as client:
+ # Get head block for basic info with chain context
+ response = client.get(
+ f"{node_url}/rpc/head?chain_id={chain}",
+ timeout=5
+ )
+
+ if response.status_code == 200:
+ head_data = response.json()
+ # Create basic info from head block
+ all_info[chain] = {
+ "chain_id": chain,
+ "height": head_data.get("height"),
+ "latest_block": head_data.get("hash"),
+ "timestamp": head_data.get("timestamp"),
+ "transactions_in_block": head_data.get("tx_count", 0),
+ "status": "active",
+ "available": True
+ }
+ else:
+ all_info[chain] = {
+ "chain_id": chain,
+ "error": f"HTTP {response.status_code}",
+ "available": False
+ }
+ except Exception as e:
+ all_info[chain] = {
+ "chain_id": chain,
+ "error": str(e),
+ "available": False
+ }
+
+ # Count available chains
+ available_chains = sum(1 for info in all_info.values() if info.get("available", False))
+
+ output({
+ "chains": all_info,
+ "total_chains": len(chains),
+ "available_chains": available_chains,
+ "query_type": "all_chains"
+ }, ctx.obj['output_format'])
+
+ else:
+ # Query specific chain (default to ait-devnet if not specified)
+ target_chain = chain_id or 'ait-devnet'
+
+ with httpx.Client() as client:
+ # Get head block for basic info with chain context
+ response = client.get(
+ f"{node_url}/rpc/head?chain_id={target_chain}",
+ timeout=5
+ )
+
+ if response.status_code == 200:
+ head_data = response.json()
+ # Create basic info from head block
+ info_data = {
+ "chain_id": target_chain,
+ "height": head_data.get("height"),
+ "latest_block": head_data.get("hash"),
+ "timestamp": head_data.get("timestamp"),
+ "transactions_in_block": head_data.get("tx_count", 0),
+ "status": "active",
+ "available": True,
+ "query_type": "single_chain"
+ }
+ output(info_data, ctx.obj['output_format'])
+ else:
+ output({
+ "chain_id": target_chain,
+ "error": f"HTTP {response.status_code}",
+ "available": False,
+ "query_type": "single_chain_error"
+ }, ctx.obj['output_format'])
+
+ except Exception as e:
+ error(f"Network error: {e}")
+
+
+@blockchain.command()
+@click.option('--chain-id', help='Specific chain ID to query (default: ait-devnet)')
+@click.option('--all-chains', is_flag=True, help='Get supply across all available chains')
+@click.pass_context
+def supply(ctx, chain_id: str, all_chains: bool):
+ """Get token supply information across chains"""
+ try:
+ config = ctx.obj['config']
+ node_url = _get_node_endpoint(ctx)
+
+ if all_chains:
+ # Get supply across all available chains
+ chains = ['ait-devnet', 'ait-testnet'] # TODO: Get from chain registry
+ all_supply = {}
+
+ for chain in chains:
+ try:
+ with httpx.Client() as client:
+ response = client.get(
+ f"{node_url}/rpc/supply?chain_id={chain}",
+ timeout=5
+ )
+
+ if response.status_code == 200:
+ supply_data = response.json()
+ all_supply[chain] = {
+ "chain_id": chain,
+ "supply": supply_data,
+ "available": True
+ }
+ else:
+ all_supply[chain] = {
+ "chain_id": chain,
+ "error": f"HTTP {response.status_code}",
+ "available": False
+ }
+ except Exception as e:
+ all_supply[chain] = {
+ "chain_id": chain,
+ "error": str(e),
+ "available": False
+ }
+
+ # Count chains with available supply data
+ chains_with_supply = sum(1 for supply in all_supply.values() if supply.get("available", False))
+
+ output({
+ "chains": all_supply,
+ "total_chains": len(chains),
+ "chains_with_supply": chains_with_supply,
+ "query_type": "all_chains"
+ }, ctx.obj['output_format'])
+
+ else:
+ # Query specific chain (default to ait-devnet if not specified)
+ target_chain = chain_id or 'ait-devnet'
+
+ with httpx.Client() as client:
+ response = client.get(
+ f"{node_url}/rpc/supply?chain_id={target_chain}",
+ timeout=5
+ )
+
+ if response.status_code == 200:
+ supply_data = response.json()
+ output({
+ "chain_id": target_chain,
+ "supply": supply_data,
+ "available": True,
+ "query_type": "single_chain"
+ }, ctx.obj['output_format'])
+ else:
+ output({
+ "chain_id": target_chain,
+ "error": f"HTTP {response.status_code}",
+ "available": False,
+ "query_type": "single_chain_error"
+ }, ctx.obj['output_format'])
+
+ except Exception as e:
+ error(f"Network error: {e}")
+
+
+@blockchain.command()
+@click.option('--chain-id', help='Specific chain ID to query (default: ait-devnet)')
+@click.option('--all-chains', is_flag=True, help='Get validators across all available chains')
+@click.pass_context
+def validators(ctx, chain_id: str, all_chains: bool):
+ """List blockchain validators across chains"""
+ try:
+ config = ctx.obj['config']
+ node_url = _get_node_endpoint(ctx)
+
+ if all_chains:
+ # Get validators across all available chains
+ chains = ['ait-devnet', 'ait-testnet'] # TODO: Get from chain registry
+ all_validators = {}
+
+ for chain in chains:
+ try:
+ with httpx.Client() as client:
+ response = client.get(
+ f"{node_url}/rpc/validators?chain_id={chain}",
+ timeout=5
+ )
+
+ if response.status_code == 200:
+ validators_data = response.json()
+ all_validators[chain] = {
+ "chain_id": chain,
+ "validators": validators_data.get("validators", validators_data),
+ "available": True
+ }
+ else:
+ all_validators[chain] = {
+ "chain_id": chain,
+ "error": f"HTTP {response.status_code}",
+ "available": False
+ }
+ except Exception as e:
+ all_validators[chain] = {
+ "chain_id": chain,
+ "error": str(e),
+ "available": False
+ }
+
+ # Count chains with available validators
+ chains_with_validators = sum(1 for validators in all_validators.values() if validators.get("available", False))
+
+ output({
+ "chains": all_validators,
+ "total_chains": len(chains),
+ "chains_with_validators": chains_with_validators,
+ "query_type": "all_chains"
+ }, ctx.obj['output_format'])
+
+ else:
+ # Query specific chain (default to ait-devnet if not specified)
+ target_chain = chain_id or 'ait-devnet'
+
+ with httpx.Client() as client:
+ response = client.get(
+ f"{node_url}/rpc/validators?chain_id={target_chain}",
+ timeout=5
+ )
+
+ if response.status_code == 200:
+ validators_data = response.json()
+ output({
+ "chain_id": target_chain,
+ "validators": validators_data.get("validators", validators_data),
+ "available": True,
+ "query_type": "single_chain"
+ }, ctx.obj['output_format'])
+ else:
+ output({
+ "chain_id": target_chain,
+ "error": f"HTTP {response.status_code}",
+ "available": False,
+ "query_type": "single_chain_error"
+ }, ctx.obj['output_format'])
+
+ except Exception as e:
+ error(f"Network error: {e}")
+
+@blockchain.command()
+@click.option('--chain-id', required=True, help='Chain ID')
+@click.pass_context
+def genesis(ctx, chain_id):
+ """Get the genesis block of a chain"""
+ config = ctx.obj['config']
+ try:
+ import httpx
+ with httpx.Client() as client:
+ # We assume node 1 is running on port 8082, but let's just hit the first configured node
+ response = client.get(
+ f"{_get_node_endpoint(ctx)}/rpc/blocks/0?chain_id={chain_id}",
+ timeout=5
+ )
+ if response.status_code == 200:
+ output(response.json(), ctx.obj['output_format'])
+ else:
+ error(f"Failed to get genesis block: {response.status_code} - {response.text}")
+ except Exception as e:
+ error(f"Network error: {e}")
+
+@blockchain.command()
+@click.option('--chain-id', required=True, help='Chain ID')
+@click.pass_context
+def transactions(ctx, chain_id):
+ """Get latest transactions on a chain"""
+ config = ctx.obj['config']
+ try:
+ import httpx
+ with httpx.Client() as client:
+ response = client.get(
+ f"{_get_node_endpoint(ctx)}/rpc/transactions?chain_id={chain_id}",
+ timeout=5
+ )
+ if response.status_code == 200:
+ output(response.json(), ctx.obj['output_format'])
+ else:
+ error(f"Failed to get transactions: {response.status_code} - {response.text}")
+ except Exception as e:
+ error(f"Network error: {e}")
+
+@blockchain.command()
+@click.option('--chain-id', required=True, help='Chain ID')
+@click.pass_context
+def head(ctx, chain_id):
+ """Get the head block of a chain"""
+ config = ctx.obj['config']
+ try:
+ import httpx
+ with httpx.Client() as client:
+ response = client.get(
+ f"{_get_node_endpoint(ctx)}/rpc/head?chain_id={chain_id}",
+ timeout=5
+ )
+ if response.status_code == 200:
+ output(response.json(), ctx.obj['output_format'])
+ else:
+ error(f"Failed to get head block: {response.status_code} - {response.text}")
+ except Exception as e:
+ error(f"Network error: {e}")
+
+
+@blockchain.command()
+@click.option('--chain-id', required=True, help='Chain ID')
+@click.option('--from', 'from_addr', required=True, help='Sender address')
+@click.option('--to', required=True, help='Recipient address')
+@click.option('--data', required=True, help='Transaction data payload')
+@click.option('--nonce', type=int, default=0, help='Nonce')
+@click.pass_context
+def send(ctx, chain_id, from_addr, to, data, nonce):
+ """Send a transaction to a chain"""
+ config = ctx.obj['config']
+ try:
+ import httpx
+ with httpx.Client() as client:
+ tx_payload = {
+ "type": "TRANSFER",
+ "chain_id": chain_id,
+ "from_address": from_addr,
+ "to_address": to,
+ "value": 0,
+ "gas_limit": 100000,
+ "gas_price": 1,
+ "nonce": nonce,
+ "data": data,
+ "signature": "mock_signature"
+ }
+
+ response = client.post(
+ f"{_get_node_endpoint(ctx)}/rpc/sendTx",
+ json=tx_payload,
+ timeout=5
+ )
+ if response.status_code in (200, 201):
+ output(response.json(), ctx.obj['output_format'])
+ else:
+ error(f"Failed to send transaction: {response.status_code} - {response.text}")
+ except Exception as e:
+ error(f"Network error: {e}")
+
+
+@blockchain.command()
+@click.option('--address', required=True, help='Wallet address')
+@click.option('--chain-id', help='Specific chain ID to query (default: ait-devnet)')
+@click.option('--all-chains', is_flag=True, help='Query balance across all available chains')
+@click.pass_context
+def balance(ctx, address, chain_id, all_chains):
+ """Get the balance of an address across chains"""
+ config = ctx.obj['config']
+ try:
+ import httpx
+
+ if all_chains:
+ # Query all available chains
+ chains = ['ait-devnet', 'ait-testnet'] # TODO: Get from chain registry
+ balances = {}
+
+ with httpx.Client() as client:
+ for chain in chains:
+ try:
+ response = client.get(
+ f"{_get_node_endpoint(ctx)}/rpc/getBalance/{address}?chain_id={chain}",
+ timeout=5
+ )
+ if response.status_code == 200:
+ balances[chain] = response.json()
+ else:
+ balances[chain] = {"error": f"HTTP {response.status_code}"}
+ except Exception as e:
+ balances[chain] = {"error": str(e)}
+
+ output({
+ "address": address,
+ "chains": balances,
+ "total_chains": len(chains),
+ "successful_queries": sum(1 for b in balances.values() if "error" not in b)
+ }, ctx.obj['output_format'])
+
+ else:
+ # Query specific chain (default to ait-devnet if not specified)
+ target_chain = chain_id or 'ait-devnet'
+
+ with httpx.Client() as client:
+ response = client.get(
+ f"{_get_node_endpoint(ctx)}/rpc/getBalance/{address}?chain_id={target_chain}",
+ timeout=5
+ )
+ if response.status_code == 200:
+ balance_data = response.json()
+ output({
+ "address": address,
+ "chain_id": target_chain,
+ "balance": balance_data,
+ "query_type": "single_chain"
+ }, ctx.obj['output_format'])
+ else:
+ error(f"Failed to get balance: {response.status_code} - {response.text}")
+
+ except Exception as e:
+ error(f"Network error: {e}")
+
+@blockchain.command()
+@click.option('--address', required=True, help='Wallet address')
+@click.option('--amount', type=int, default=1000, help='Amount to mint')
+@click.pass_context
+def faucet(ctx, address, amount):
+ """Mint devnet funds to an address"""
+ config = ctx.obj['config']
+ try:
+ import httpx
+ with httpx.Client() as client:
+ response = client.post(
+ f"{_get_node_endpoint(ctx)}/rpc/admin/mintFaucet",
+ json={"address": address, "amount": amount, "chain_id": "ait-devnet"},
+ timeout=5
+ )
+ if response.status_code in (200, 201):
+ output(response.json(), ctx.obj['output_format'])
+ else:
+ error(f"Failed to use faucet: {response.status_code} - {response.text}")
+ except Exception as e:
+ error(f"Network error: {e}")
+
+
+@blockchain.command()
+@click.option('--chain', required=True, help='Chain ID to verify (e.g., ait-mainnet, ait-devnet)')
+@click.option('--genesis-hash', help='Expected genesis hash to verify against')
+@click.option('--verify-signatures', is_flag=True, default=True, help='Verify genesis block signatures')
+@click.pass_context
+def verify_genesis(ctx, chain: str, genesis_hash: Optional[str], verify_signatures: bool):
+ """Verify genesis block integrity for a specific chain"""
+ try:
+ import httpx
+ from ..utils import success
+
+ with httpx.Client() as client:
+ # Get genesis block for the specified chain
+ response = client.get(
+ f"{_get_node_endpoint(ctx)}/rpc/getGenesisBlock?chain_id={chain}",
+ timeout=10
+ )
+
+ if response.status_code != 200:
+ error(f"Failed to get genesis block for chain '{chain}': {response.status_code}")
+ return
+
+ genesis_data = response.json()
+
+ # Verification results
+ verification_results = {
+ "chain_id": chain,
+ "genesis_block": genesis_data,
+ "verification_passed": True,
+ "checks": {}
+ }
+
+ # Check 1: Genesis hash verification
+ if genesis_hash:
+ actual_hash = genesis_data.get("hash")
+ if actual_hash == genesis_hash:
+ verification_results["checks"]["hash_match"] = {
+ "status": "passed",
+ "expected": genesis_hash,
+ "actual": actual_hash
+ }
+ success(f"โ
Genesis hash matches expected value")
+ else:
+ verification_results["checks"]["hash_match"] = {
+ "status": "failed",
+ "expected": genesis_hash,
+ "actual": actual_hash
+ }
+ verification_results["verification_passed"] = False
+ error(f"โ Genesis hash mismatch!")
+ error(f"Expected: {genesis_hash}")
+ error(f"Actual: {actual_hash}")
+
+ # Check 2: Genesis block structure
+ required_fields = ["hash", "previous_hash", "timestamp", "transactions", "nonce"]
+ missing_fields = [field for field in required_fields if field not in genesis_data]
+
+ if not missing_fields:
+ verification_results["checks"]["structure"] = {
+ "status": "passed",
+ "required_fields": required_fields
+ }
+ success(f"โ
Genesis block structure is valid")
+ else:
+ verification_results["checks"]["structure"] = {
+ "status": "failed",
+ "missing_fields": missing_fields
+ }
+ verification_results["verification_passed"] = False
+ error(f"โ Genesis block missing required fields: {missing_fields}")
+
+ # Check 3: Signature verification (if requested)
+ if verify_signatures and "signature" in genesis_data:
+ # This would implement actual signature verification
+ # For now, we'll just check if signature exists
+ verification_results["checks"]["signature"] = {
+ "status": "passed",
+ "signature_present": True
+ }
+ success(f"โ
Genesis block signature is present")
+ elif verify_signatures:
+ verification_results["checks"]["signature"] = {
+ "status": "warning",
+ "message": "No signature found in genesis block"
+ }
+ warning(f"โ ๏ธ No signature found in genesis block")
+
+ # Check 4: Previous hash should be null/empty for genesis
+ prev_hash = genesis_data.get("previous_hash")
+ if prev_hash in [None, "", "0", "0x0000000000000000000000000000000000000000000000000000000000000000"]:
+ verification_results["checks"]["previous_hash"] = {
+ "status": "passed",
+ "previous_hash": prev_hash
+ }
+ success(f"โ
Genesis block previous hash is correct (null)")
+ else:
+ verification_results["checks"]["previous_hash"] = {
+ "status": "failed",
+ "previous_hash": prev_hash
+ }
+ verification_results["verification_passed"] = False
+ error(f"โ Genesis block previous hash should be null")
+
+ # Final result
+ if verification_results["verification_passed"]:
+ success(f"๐ Genesis block verification PASSED for chain '{chain}'")
+ else:
+ error(f"โ Genesis block verification FAILED for chain '{chain}'")
+
+ output(verification_results, ctx.obj['output_format'])
+
+ except Exception as e:
+ error(f"Failed to verify genesis block: {e}")
+
+
+@blockchain.command()
+@click.option('--chain', required=True, help='Chain ID to get genesis hash for')
+@click.pass_context
+def genesis_hash(ctx, chain: str):
+ """Get the genesis block hash for a specific chain"""
+ try:
+ import httpx
+ from ..utils import success
+
+ with httpx.Client() as client:
+ response = client.get(
+ f"{_get_node_endpoint(ctx)}/rpc/getGenesisBlock?chain_id={chain}",
+ timeout=10
+ )
+
+ if response.status_code != 200:
+ error(f"Failed to get genesis block for chain '{chain}': {response.status_code}")
+ return
+
+ genesis_data = response.json()
+ genesis_hash_value = genesis_data.get("hash")
+
+ if genesis_hash_value:
+ success(f"Genesis hash for chain '{chain}':")
+ output({
+ "chain_id": chain,
+ "genesis_hash": genesis_hash_value,
+ "genesis_block": {
+ "hash": genesis_hash_value,
+ "timestamp": genesis_data.get("timestamp"),
+ "transaction_count": len(genesis_data.get("transactions", [])),
+ "nonce": genesis_data.get("nonce")
+ }
+ }, ctx.obj['output_format'])
+ else:
+ error(f"No hash found in genesis block for chain '{chain}'")
+
+ except Exception as e:
+ error(f"Failed to get genesis hash: {e}")
+
+
+def warning(message: str):
+ """Display warning message"""
+ click.echo(click.style(f"โ ๏ธ {message}", fg='yellow'))
diff --git a/cli/aitbc_cli/commands/miner.py.backup b/cli/aitbc_cli/commands/miner.py.backup
new file mode 100755
index 00000000..21037f6b
--- /dev/null
+++ b/cli/aitbc_cli/commands/miner.py.backup
@@ -0,0 +1,637 @@
+"""Miner commands for AITBC CLI"""
+
+import click
+import httpx
+import json
+import time
+import concurrent.futures
+from typing import Optional, Dict, Any, List
+from ..utils import output, error, success
+
+
+@click.group(invoke_without_command=True)
+@click.pass_context
+def miner(ctx):
+ """Register as miner and process jobs"""
+ # Set role for miner commands - this will be used by parent context
+ ctx.ensure_object(dict)
+ # Set role at the highest level context (CLI root)
+ ctx.find_root().detected_role = 'miner'
+
+ # If no subcommand was invoked, show help
+ if ctx.invoked_subcommand is None:
+ click.echo(ctx.get_help())
+
+
+@miner.command()
+@click.option("--gpu", help="GPU model name")
+@click.option("--memory", type=int, help="GPU memory in GB")
+@click.option("--cuda-cores", type=int, help="Number of CUDA cores")
+@click.option("--miner-id", default="cli-miner", help="Miner ID")
+@click.pass_context
+def register(ctx, gpu: Optional[str], memory: Optional[int],
+ cuda_cores: Optional[int], miner_id: str):
+ """Register as a miner with the coordinator"""
+ config = ctx.obj['config']
+
+ # Build capabilities
+ capabilities = {}
+ if gpu:
+ capabilities["gpu"] = {"model": gpu}
+ if memory:
+ if "gpu" not in capabilities:
+ capabilities["gpu"] = {}
+ capabilities["gpu"]["memory_gb"] = memory
+ if cuda_cores:
+ if "gpu" not in capabilities:
+ capabilities["gpu"] = {}
+ capabilities["gpu"]["cuda_cores"] = cuda_cores
+
+ # Default capabilities if none provided
+ if not capabilities:
+ capabilities = {
+ "cpu": {"cores": 4},
+ "memory": {"gb": 16}
+ }
+
+ try:
+ with httpx.Client() as client:
+ response = client.post(
+ f"{config.coordinator_url}/v1/miners/register",
+ headers={
+ "Content-Type": "application/json",
+ "X-Api-Key": config.api_key or "",
+ "X-Miner-ID": miner_id
+ },
+ json={"capabilities": capabilities}
+ )
+
+ if response.status_code in (200, 204):
+ output({
+ "miner_id": miner_id,
+ "status": "registered",
+ "capabilities": capabilities
+ }, ctx.obj['output_format'])
+ else:
+ error(f"Failed to register: {response.status_code} - {response.text}")
+ except Exception as e:
+ error(f"Network error: {e}")
+
+
+@miner.command()
+@click.option("--wait", type=int, default=5, help="Max wait time in seconds")
+@click.option("--miner-id", default="cli-miner", help="Miner ID")
+@click.pass_context
+def poll(ctx, wait: int, miner_id: str):
+ """Poll for a single job"""
+ config = ctx.obj['config']
+
+ try:
+ with httpx.Client() as client:
+ response = client.post(
+ f"{config.coordinator_url}/v1/miners/poll",
+ json={"max_wait_seconds": 5},
+ headers={
+ "X-Api-Key": config.api_key or "",
+ "X-Miner-ID": miner_id
+ },
+ timeout=wait + 5
+ )
+
+ if response.status_code in (200, 204):
+ if response.status_code == 204:
+ output({"message": "No jobs available"}, ctx.obj['output_format'])
+ else:
+ job = response.json()
+ if job:
+ output(job, ctx.obj['output_format'])
+ else:
+ output({"message": "No jobs available"}, ctx.obj['output_format'])
+ else:
+ error(f"Failed to poll: {response.status_code}")
+ except httpx.TimeoutException:
+ output({"message": f"No jobs available within {wait} seconds"}, ctx.obj['output_format'])
+ except Exception as e:
+ error(f"Network error: {e}")
+
+
+@miner.command()
+@click.option("--jobs", type=int, default=1, help="Number of jobs to process")
+@click.option("--miner-id", default="cli-miner", help="Miner ID")
+@click.pass_context
+def mine(ctx, jobs: int, miner_id: str):
+ """Mine continuously for specified number of jobs"""
+ config = ctx.obj['config']
+
+ processed = 0
+ while processed < jobs:
+ try:
+ with httpx.Client() as client:
+ # Poll for job
+ response = client.post(
+ f"{config.coordinator_url}/v1/miners/poll",
+ json={"max_wait_seconds": 5},
+ headers={
+ "X-Api-Key": config.api_key or "",
+ "X-Miner-ID": miner_id
+ },
+ timeout=30
+ )
+
+ if response.status_code in (200, 204):
+ if response.status_code == 204:
+ time.sleep(5)
+ continue
+ job = response.json()
+ if job:
+ job_id = job.get('job_id')
+ output({
+ "job_id": job_id,
+ "status": "processing",
+ "job_number": processed + 1
+ }, ctx.obj['output_format'])
+
+ # Simulate processing (in real implementation, do actual work)
+ time.sleep(2)
+
+ # Submit result
+ result_response = client.post(
+ f"{config.coordinator_url}/v1/miners/{job_id}/result",
+ headers={
+ "Content-Type": "application/json",
+ "X-Api-Key": config.api_key or "",
+ "X-Miner-ID": miner_id
+ },
+ json={
+ "result": {"output": f"Processed job {job_id}"},
+ "metrics": {}
+ }
+ )
+
+ if result_response.status_code == 200:
+ success(f"Job {job_id} completed successfully")
+ processed += 1
+ else:
+ error(f"Failed to submit result: {result_response.status_code}")
+ else:
+ # No job available, wait a bit
+ time.sleep(5)
+ else:
+ error(f"Failed to poll: {response.status_code}")
+ break
+
+ except Exception as e:
+ error(f"Error: {e}")
+ break
+
+ output({
+ "total_processed": processed,
+ "miner_id": miner_id
+ }, ctx.obj['output_format'])
+
+
+@miner.command()
+@click.option("--miner-id", default="cli-miner", help="Miner ID")
+@click.pass_context
+def heartbeat(ctx, miner_id: str):
+ """Send heartbeat to coordinator"""
+ config = ctx.obj['config']
+
+ try:
+ with httpx.Client() as client:
+ response = client.post(
+ f"{config.coordinator_url}/v1/miners/heartbeat",
+ headers={
+ "X-Api-Key": config.api_key or "",
+ "X-Miner-ID": miner_id
+ },
+ json={
+ "inflight": 0,
+ "status": "ONLINE",
+ "metadata": {}
+ }
+ )
+
+ if response.status_code in (200, 204):
+ output({
+ "miner_id": miner_id,
+ "status": "heartbeat_sent",
+ "timestamp": time.time()
+ }, ctx.obj['output_format'])
+ else:
+ error(f"Failed to send heartbeat: {response.status_code}")
+ except Exception as e:
+ error(f"Network error: {e}")
+
+
+@miner.command()
+@click.option("--miner-id", default="cli-miner", help="Miner ID")
+@click.pass_context
+def status(ctx, miner_id: str):
+ """Check miner status"""
+ config = ctx.obj['config']
+
+ # This would typically query a miner status endpoint
+ # For now, we'll just show the miner info
+ output({
+ "miner_id": miner_id,
+ "coordinator": config.coordinator_url,
+ "status": "active"
+ }, ctx.obj['output_format'])
+
+
+@miner.command()
+@click.option("--miner-id", default="cli-miner", help="Miner ID")
+@click.option("--from-time", help="Filter from timestamp (ISO format)")
+@click.option("--to-time", help="Filter to timestamp (ISO format)")
+@click.pass_context
+def earnings(ctx, miner_id: str, from_time: Optional[str], to_time: Optional[str]):
+ """Show miner earnings"""
+ config = ctx.obj['config']
+
+ try:
+ params = {"miner_id": miner_id}
+ if from_time:
+ params["from_time"] = from_time
+ if to_time:
+ params["to_time"] = to_time
+
+ with httpx.Client() as client:
+ response = client.post(
+ f"{config.coordinator_url}/v1/miners/{miner_id}/earnings",
+ params=params,
+ headers={"X-Api-Key": config.api_key or ""}
+ )
+
+ if response.status_code in (200, 204):
+ data = response.json()
+ output(data, ctx.obj['output_format'])
+ else:
+ error(f"Failed to get earnings: {response.status_code}")
+ ctx.exit(1)
+ except Exception as e:
+ error(f"Network error: {e}")
+ ctx.exit(1)
+
+
+@miner.command(name="update-capabilities")
+@click.option("--gpu", help="GPU model name")
+@click.option("--memory", type=int, help="GPU memory in GB")
+@click.option("--cuda-cores", type=int, help="Number of CUDA cores")
+@click.option("--miner-id", default="cli-miner", help="Miner ID")
+@click.pass_context
+def update_capabilities(ctx, gpu: Optional[str], memory: Optional[int],
+ cuda_cores: Optional[int], miner_id: str):
+ """Update miner GPU capabilities"""
+ config = ctx.obj['config']
+
+ capabilities = {}
+ if gpu:
+ capabilities["gpu"] = {"model": gpu}
+ if memory:
+ if "gpu" not in capabilities:
+ capabilities["gpu"] = {}
+ capabilities["gpu"]["memory_gb"] = memory
+ if cuda_cores:
+ if "gpu" not in capabilities:
+ capabilities["gpu"] = {}
+ capabilities["gpu"]["cuda_cores"] = cuda_cores
+
+ if not capabilities:
+ error("No capabilities specified. Use --gpu, --memory, or --cuda-cores.")
+ return
+
+ try:
+ with httpx.Client() as client:
+ response = client.put(
+ f"{config.coordinator_url}/v1/miners/{miner_id}/capabilities",
+ headers={
+ "Content-Type": "application/json",
+ "X-Api-Key": config.api_key or ""
+ },
+ json={"capabilities": capabilities}
+ )
+
+ if response.status_code in (200, 204):
+ output({
+ "miner_id": miner_id,
+ "status": "capabilities_updated",
+ "capabilities": capabilities
+ }, ctx.obj['output_format'])
+ else:
+ error(f"Failed to update capabilities: {response.status_code}")
+ ctx.exit(1)
+ except Exception as e:
+ error(f"Network error: {e}")
+ ctx.exit(1)
+
+
+@miner.command()
+@click.option("--miner-id", default="cli-miner", help="Miner ID")
+@click.option("--force", is_flag=True, help="Force deregistration without confirmation")
+@click.pass_context
+def deregister(ctx, miner_id: str, force: bool):
+ """Deregister miner from the coordinator"""
+ if not force:
+ if not click.confirm(f"Deregister miner '{miner_id}'?"):
+ click.echo("Cancelled.")
+ return
+
+ config = ctx.obj['config']
+
+ try:
+ with httpx.Client() as client:
+ response = client.delete(
+ f"{config.coordinator_url}/v1/miners/{miner_id}",
+ headers={"X-Api-Key": config.api_key or ""}
+ )
+
+ if response.status_code in (200, 204):
+ output({
+ "miner_id": miner_id,
+ "status": "deregistered"
+ }, ctx.obj['output_format'])
+ else:
+ error(f"Failed to deregister: {response.status_code}")
+ ctx.exit(1)
+ except Exception as e:
+ error(f"Network error: {e}")
+ ctx.exit(1)
+
+
+@miner.command()
+@click.option("--limit", default=10, help="Number of jobs to show")
+@click.option("--type", "job_type", help="Filter by job type")
+@click.option("--min-reward", type=float, help="Minimum reward threshold")
+@click.option("--status", "job_status", help="Filter by status (pending, running, completed, failed)")
+@click.option("--miner-id", default="cli-miner", help="Miner ID")
+@click.pass_context
+def jobs(ctx, limit: int, job_type: Optional[str], min_reward: Optional[float],
+ job_status: Optional[str], miner_id: str):
+ """List miner jobs with filtering"""
+ config = ctx.obj['config']
+
+ try:
+ params = {"limit": limit, "miner_id": miner_id}
+ if job_type:
+ params["type"] = job_type
+ if min_reward is not None:
+ params["min_reward"] = min_reward
+ if job_status:
+ params["status"] = job_status
+
+ with httpx.Client() as client:
+ response = client.post(
+ f"{config.coordinator_url}/v1/miners/{miner_id}/jobs",
+ params=params,
+ headers={"X-Api-Key": config.api_key or ""}
+ )
+
+ if response.status_code in (200, 204):
+ data = response.json()
+ output(data, ctx.obj['output_format'])
+ else:
+ error(f"Failed to get jobs: {response.status_code}")
+ ctx.exit(1)
+ except Exception as e:
+ error(f"Network error: {e}")
+ ctx.exit(1)
+
+
+def _process_single_job(config, miner_id: str, worker_id: int) -> Dict[str, Any]:
+ """Process a single job (used by concurrent mine)"""
+ try:
+ with httpx.Client() as http_client:
+ response = http_client.post(
+ f"{config.coordinator_url}/v1/miners/poll",
+ json={"max_wait_seconds": 5},
+ headers={
+ "X-Api-Key": config.api_key or "",
+ "X-Miner-ID": miner_id
+ },
+ timeout=30
+ )
+
+ if response.status_code == 204:
+ return {"worker": worker_id, "status": "no_job"}
+ if response.status_code == 200:
+ job = response.json()
+ if job:
+ job_id = job.get('job_id')
+ time.sleep(2) # Simulate processing
+
+ result_response = http_client.post(
+ f"{config.coordinator_url}/v1/miners/{job_id}/result",
+ headers={
+ "Content-Type": "application/json",
+ "X-Api-Key": config.api_key or "",
+ "X-Miner-ID": miner_id
+ },
+ json={"result": {"output": f"Processed by worker {worker_id}"}, "metrics": {}}
+ )
+
+ return {
+ "worker": worker_id,
+ "job_id": job_id,
+ "status": "completed" if result_response.status_code == 200 else "failed"
+ }
+ return {"worker": worker_id, "status": "no_job"}
+ except Exception as e:
+ return {"worker": worker_id, "status": "error", "error": str(e)}
+
+
+def _run_ollama_inference(ollama_url: str, model: str, prompt: str) -> Dict[str, Any]:
+ """Run inference through local Ollama instance"""
+ try:
+ with httpx.Client(timeout=120) as client:
+ response = client.post(
+ f"{ollama_url}/api/generate",
+ json={
+ "model": model,
+ "prompt": prompt,
+ "stream": False
+ }
+ )
+ if response.status_code == 200:
+ data = response.json()
+ return {
+ "response": data.get("response", ""),
+ "model": data.get("model", model),
+ "total_duration": data.get("total_duration", 0),
+ "eval_count": data.get("eval_count", 0),
+ "eval_duration": data.get("eval_duration", 0),
+ }
+ else:
+ return {"error": f"Ollama returned {response.status_code}"}
+ except Exception as e:
+ return {"error": str(e)}
+
+
+@miner.command(name="mine-ollama")
+@click.option("--jobs", type=int, default=1, help="Number of jobs to process")
+@click.option("--miner-id", default="cli-miner", help="Miner ID")
+@click.option("--ollama-url", default="http://localhost:11434", help="Ollama API URL")
+@click.option("--model", default="gemma3:1b", help="Ollama model to use")
+@click.pass_context
+def mine_ollama(ctx, jobs: int, miner_id: str, ollama_url: str, model: str):
+ """Mine jobs using local Ollama for GPU inference"""
+ config = ctx.obj['config']
+
+ # Verify Ollama is reachable
+ try:
+ with httpx.Client(timeout=5) as client:
+ resp = client.get(f"{ollama_url}/api/tags")
+ if resp.status_code != 200:
+ error(f"Cannot reach Ollama at {ollama_url}")
+ return
+ models = [m["name"] for m in resp.json().get("models", [])]
+ if model not in models:
+ error(f"Model '{model}' not found. Available: {', '.join(models)}")
+ return
+ success(f"Ollama connected: {ollama_url} | model: {model}")
+ except Exception as e:
+ error(f"Cannot connect to Ollama: {e}")
+ return
+
+ processed = 0
+ while processed < jobs:
+ try:
+ with httpx.Client() as client:
+ response = client.post(
+ f"{config.coordinator_url}/v1/miners/poll",
+ json={"max_wait_seconds": 10},
+ headers={
+ "X-Api-Key": config.api_key or "",
+ "X-Miner-ID": miner_id
+ },
+ timeout=30
+ )
+
+ if response.status_code == 204:
+ time.sleep(5)
+ continue
+
+ if response.status_code != 200:
+ error(f"Failed to poll: {response.status_code}")
+ break
+
+ job = response.json()
+ if not job:
+ time.sleep(5)
+ continue
+
+ job_id = job.get('job_id')
+ payload = job.get('payload', {})
+ prompt = payload.get('prompt', '')
+ job_model = payload.get('model', model)
+
+ output({
+ "job_id": job_id,
+ "status": "processing",
+ "prompt": prompt[:80] + ("..." if len(prompt) > 80 else ""),
+ "model": job_model,
+ "job_number": processed + 1
+ }, ctx.obj['output_format'])
+
+ # Run inference through Ollama
+ start_time = time.time()
+ ollama_result = _run_ollama_inference(ollama_url, job_model, prompt)
+ duration_ms = int((time.time() - start_time) * 1000)
+
+ if "error" in ollama_result:
+ error(f"Ollama inference failed: {ollama_result['error']}")
+ # Submit failure
+ client.post(
+ f"{config.coordinator_url}/v1/miners/{job_id}/fail",
+ headers={
+ "Content-Type": "application/json",
+ "X-Api-Key": config.api_key or "",
+ "X-Miner-ID": miner_id
+ },
+ json={"error_code": "INFERENCE_FAILED", "error_message": ollama_result['error'], "metrics": {}}
+ )
+ continue
+
+ # Submit successful result
+ result_response = client.post(
+ f"{config.coordinator_url}/v1/miners/{job_id}/result",
+ headers={
+ "Content-Type": "application/json",
+ "X-Api-Key": config.api_key or "",
+ "X-Miner-ID": miner_id
+ },
+ json={
+ "result": {
+ "response": ollama_result.get("response", ""),
+ "model": ollama_result.get("model", job_model),
+ "provider": "ollama",
+ "eval_count": ollama_result.get("eval_count", 0),
+ },
+ "metrics": {
+ "duration_ms": duration_ms,
+ "eval_count": ollama_result.get("eval_count", 0),
+ "eval_duration": ollama_result.get("eval_duration", 0),
+ "total_duration": ollama_result.get("total_duration", 0),
+ }
+ }
+ )
+
+ if result_response.status_code == 200:
+ success(f"Job {job_id} completed via Ollama ({duration_ms}ms)")
+ processed += 1
+ else:
+ error(f"Failed to submit result: {result_response.status_code}")
+
+ except Exception as e:
+ error(f"Error: {e}")
+ break
+
+ output({
+ "total_processed": processed,
+ "miner_id": miner_id,
+ "model": model,
+ "provider": "ollama"
+ }, ctx.obj['output_format'])
+
+
+@miner.command(name="concurrent-mine")
+@click.option("--workers", type=int, default=2, help="Number of concurrent workers")
+@click.option("--jobs", "total_jobs", type=int, default=5, help="Total jobs to process")
+@click.option("--miner-id", default="cli-miner", help="Miner ID")
+@click.pass_context
+def concurrent_mine(ctx, workers: int, total_jobs: int, miner_id: str):
+ """Mine with concurrent job processing"""
+ config = ctx.obj['config']
+
+ success(f"Starting concurrent mining: {workers} workers, {total_jobs} jobs")
+
+ completed = 0
+ failed = 0
+
+ with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
+ remaining = total_jobs
+ while remaining > 0:
+ batch_size = min(remaining, workers)
+ futures = [
+ executor.submit(_process_single_job, config, miner_id, i)
+ for i in range(batch_size)
+ ]
+
+ for future in concurrent.futures.as_completed(futures):
+ result = future.result()
+ if result.get("status") == "completed":
+ completed += 1
+ remaining -= 1
+ output(result, ctx.obj['output_format'])
+ elif result.get("status") == "no_job":
+ time.sleep(2)
+ else:
+ failed += 1
+ remaining -= 1
+
+ output({
+ "status": "finished",
+ "completed": completed,
+ "failed": failed,
+ "workers": workers
+ }, ctx.obj['output_format'])
diff --git a/cli/aitbc_cli/commands/wallet.py.backup b/cli/aitbc_cli/commands/wallet.py.backup
new file mode 100755
index 00000000..a356cfe3
--- /dev/null
+++ b/cli/aitbc_cli/commands/wallet.py.backup
@@ -0,0 +1,2229 @@
+"""Wallet commands for AITBC CLI"""
+
+import click
+import httpx
+import json
+import os
+import shutil
+import yaml
+from pathlib import Path
+from typing import Optional, Dict, Any, List
+from datetime import datetime, timedelta
+from ..utils import output, error, success, encrypt_value, decrypt_value
+import getpass
+
+
+def _get_wallet_password(wallet_name: str) -> str:
+ """Get or prompt for wallet encryption password"""
+ # Try to get from keyring first
+ try:
+ import keyring
+
+ password = keyring.get_password("aitbc-wallet", wallet_name)
+ if password:
+ return password
+ except Exception:
+ pass
+
+ # Prompt for password
+ while True:
+ password = getpass.getpass(f"Enter password for wallet '{wallet_name}': ")
+ if not password:
+ error("Password cannot be empty")
+ continue
+
+ confirm = getpass.getpass("Confirm password: ")
+ if password != confirm:
+ error("Passwords do not match")
+ continue
+
+ # Store in keyring for future use
+ try:
+ import keyring
+
+ keyring.set_password("aitbc-wallet", wallet_name, password)
+ except Exception:
+ pass
+
+ return password
+
+
+def _save_wallet(wallet_path: Path, wallet_data: Dict[str, Any], password: str = None):
+ """Save wallet with encrypted private key"""
+ # Encrypt private key if provided
+ if password and "private_key" in wallet_data:
+ wallet_data["private_key"] = encrypt_value(wallet_data["private_key"], password)
+ wallet_data["encrypted"] = True
+
+ # Save wallet
+ with open(wallet_path, "w") as f:
+ json.dump(wallet_data, f, indent=2)
+
+
+def _load_wallet(wallet_path: Path, wallet_name: str) -> Dict[str, Any]:
+ """Load wallet and decrypt private key if needed"""
+ with open(wallet_path, "r") as f:
+ wallet_data = json.load(f)
+
+ # Decrypt private key if encrypted
+ if wallet_data.get("encrypted") and "private_key" in wallet_data:
+ password = _get_wallet_password(wallet_name)
+ try:
+ wallet_data["private_key"] = decrypt_value(
+ wallet_data["private_key"], password
+ )
+ except Exception:
+ error("Invalid password for wallet")
+ raise click.Abort()
+
+ return wallet_data
+
+
+@click.group()
+@click.option("--wallet-name", help="Name of the wallet to use")
+@click.option(
+ "--wallet-path", help="Direct path to wallet file (overrides --wallet-name)"
+)
+@click.option(
+ "--use-daemon", is_flag=True, help="Use wallet daemon for operations"
+)
+@click.pass_context
+def wallet(ctx, wallet_name: Optional[str], wallet_path: Optional[str], use_daemon: bool):
+ """Manage your AITBC wallets and transactions"""
+ # Ensure wallet object exists
+ ctx.ensure_object(dict)
+
+ # Store daemon mode preference
+ ctx.obj["use_daemon"] = use_daemon
+
+ # Initialize dual-mode adapter
+ from ..config import get_config
+ from ..dual_mode_wallet_adapter import DualModeWalletAdapter
+
+ config = get_config()
+ adapter = DualModeWalletAdapter(config, use_daemon=use_daemon)
+ ctx.obj["wallet_adapter"] = adapter
+
+ # If direct wallet path is provided, use it
+ if wallet_path:
+ wp = Path(wallet_path)
+ wp.parent.mkdir(parents=True, exist_ok=True)
+ ctx.obj["wallet_name"] = wp.stem
+ ctx.obj["wallet_dir"] = wp.parent
+ ctx.obj["wallet_path"] = wp
+ return
+
+ # Set wallet directory
+ wallet_dir = Path.home() / ".aitbc" / "wallets"
+ wallet_dir.mkdir(parents=True, exist_ok=True)
+
+ # Set active wallet
+ if not wallet_name:
+ # Try to get from config or use 'default'
+ config_file = Path.home() / ".aitbc" / "config.yaml"
+ config = None
+ if config_file.exists():
+ with open(config_file, "r") as f:
+ config = yaml.safe_load(f)
+ if config:
+ wallet_name = config.get("active_wallet", "default")
+ else:
+ wallet_name = "default"
+ else:
+ wallet_name = "default"
+ else:
+ # Load config for other operations
+ config_file = Path.home() / ".aitbc" / "config.yaml"
+ config = None
+ if config_file.exists():
+ with open(config_file, "r") as f:
+ config = yaml.safe_load(f)
+
+ ctx.obj["wallet_name"] = wallet_name
+ ctx.obj["wallet_dir"] = wallet_dir
+ ctx.obj["wallet_path"] = wallet_dir / f"{wallet_name}.json"
+ ctx.obj["config"] = config
+
+
+@wallet.command()
+@click.argument("name")
+@click.option("--type", "wallet_type", default="hd", help="Wallet type (hd, simple)")
+@click.option(
+ "--no-encrypt", is_flag=True, help="Skip wallet encryption (not recommended)"
+)
+@click.pass_context
+def create(ctx, name: str, wallet_type: str, no_encrypt: bool):
+ """Create a new wallet"""
+ adapter = ctx.obj["wallet_adapter"]
+ use_daemon = ctx.obj["use_daemon"]
+
+ # Check if using daemon mode and daemon is available
+ if use_daemon and not adapter.is_daemon_available():
+ error("Wallet daemon is not available. Falling back to file-based wallet.")
+ # Switch to file mode
+ from ..config import get_config
+ from ..dual_mode_wallet_adapter import DualModeWalletAdapter
+ config = get_config()
+ adapter = DualModeWalletAdapter(config, use_daemon=False)
+ ctx.obj["wallet_adapter"] = adapter
+
+ # Get password for encryption
+ password = None
+ if not no_encrypt:
+ if use_daemon:
+ # For daemon mode, use a default password or prompt
+ password = getpass.getpass(f"Enter password for wallet '{name}' (press Enter for default): ")
+ if not password:
+ password = "default_wallet_password"
+ else:
+ # For file mode, use existing password prompt logic
+ password = getpass.getpass(f"Enter password for wallet '{name}': ")
+ confirm = getpass.getpass("Confirm password: ")
+ if password != confirm:
+ error("Passwords do not match")
+ return
+
+ # Create wallet using the adapter
+ try:
+ metadata = {
+ "wallet_type": wallet_type,
+ "created_by": "aitbc_cli",
+ "encryption_enabled": not no_encrypt
+ }
+
+ wallet_info = adapter.create_wallet(name, password, wallet_type, metadata)
+
+ # Display results
+ output(wallet_info, ctx.obj.get("output_format", "table"))
+
+ # Set as active wallet if successful
+ if wallet_info:
+ config_file = Path.home() / ".aitbc" / "config.yaml"
+ config_data = {}
+ if config_file.exists():
+ with open(config_file, "r") as f:
+ config_data = yaml.safe_load(f) or {}
+
+ config_data["active_wallet"] = name
+ config_file.parent.mkdir(parents=True, exist_ok=True)
+ with open(config_file, "w") as f:
+ yaml.dump(config_data, f)
+
+ success(f"Wallet '{name}' is now active")
+
+ except Exception as e:
+ error(f"Failed to create wallet: {str(e)}")
+ return
+
+
+@wallet.command()
+@click.pass_context
+def list(ctx):
+ """List all wallets"""
+ adapter = ctx.obj["wallet_adapter"]
+ use_daemon = ctx.obj["use_daemon"]
+
+ # Check if using daemon mode and daemon is available
+ if use_daemon and not adapter.is_daemon_available():
+ error("Wallet daemon is not available. Falling back to file-based wallet listing.")
+ # Switch to file mode
+ from ..config import get_config
+ from ..dual_mode_wallet_adapter import DualModeWalletAdapter
+ config = get_config()
+ adapter = DualModeWalletAdapter(config, use_daemon=False)
+
+ try:
+ wallets = adapter.list_wallets()
+
+ if not wallets:
+ output({"wallets": [], "count": 0, "mode": "daemon" if use_daemon else "file"},
+ ctx.obj.get("output_format", "table"))
+ return
+
+ # Format output
+ wallet_list = []
+ for wallet in wallets:
+ wallet_info = {
+ "name": wallet.get("wallet_name"),
+ "address": wallet.get("address"),
+ "balance": wallet.get("balance", 0.0),
+ "type": wallet.get("wallet_type", "hd"),
+ "created_at": wallet.get("created_at"),
+ "mode": wallet.get("mode", "file")
+ }
+ wallet_list.append(wallet_info)
+
+ output_data = {
+ "wallets": wallet_list,
+ "count": len(wallet_list),
+ "mode": "daemon" if use_daemon else "file"
+ }
+
+ output(output_data, ctx.obj.get("output_format", "table"))
+
+ except Exception as e:
+ error(f"Failed to list wallets: {str(e)}")
+
+
+
+
+@wallet.command()
+@click.argument("name")
+@click.pass_context
+def switch(ctx, name: str):
+ """Switch to a different wallet"""
+ adapter = ctx.obj["wallet_adapter"]
+ use_daemon = ctx.obj["use_daemon"]
+
+ # Check if using daemon mode and daemon is available
+ if use_daemon and not adapter.is_daemon_available():
+ error("Wallet daemon is not available. Falling back to file-based wallet switching.")
+ # Switch to file mode
+ from ..config import get_config
+ from ..dual_mode_wallet_adapter import DualModeWalletAdapter
+ config = get_config()
+ adapter = DualModeWalletAdapter(config, use_daemon=False)
+
+ # Check if wallet exists
+ wallet_info = adapter.get_wallet_info(name)
+ if not wallet_info:
+ error(f"Wallet '{name}' does not exist")
+ return
+
+ # Update config
+ config_file = Path.home() / ".aitbc" / "config.yaml"
+ config = {}
+ if config_file.exists():
+ import yaml
+ with open(config_file, "r") as f:
+ config = yaml.safe_load(f) or {}
+
+ config["active_wallet"] = name
+ config_file.parent.mkdir(parents=True, exist_ok=True)
+ with open(config_file, "w") as f:
+ yaml.dump(config, f)
+
+ success(f"Switched to wallet: {name}")
+ output({
+ "active_wallet": name,
+ "mode": "daemon" if use_daemon else "file",
+ "wallet_info": wallet_info
+ }, ctx.obj.get("output_format", "table"))
+
+
+@wallet.command()
+@click.argument("name")
+@click.option("--confirm", is_flag=True, help="Skip confirmation prompt")
+@click.pass_context
+def delete(ctx, name: str, confirm: bool):
+ """Delete a wallet"""
+ wallet_dir = ctx.obj["wallet_dir"]
+ wallet_path = wallet_dir / f"{name}.json"
+
+ if not wallet_path.exists():
+ error(f"Wallet '{name}' does not exist")
+ return
+
+ if not confirm:
+ if not click.confirm(
+ f"Are you sure you want to delete wallet '{name}'? This cannot be undone."
+ ):
+ return
+
+ wallet_path.unlink()
+ success(f"Wallet '{name}' deleted")
+
+ # If deleted wallet was active, reset to default
+ config_file = Path.home() / ".aitbc" / "config.yaml"
+ if config_file.exists():
+ import yaml
+
+ with open(config_file, "r") as f:
+ config = yaml.safe_load(f) or {}
+
+ if config.get("active_wallet") == name:
+ config["active_wallet"] = "default"
+ with open(config_file, "w") as f:
+ yaml.dump(config, f, default_flow_style=False)
+
+
+@wallet.command()
+@click.argument("name")
+@click.option("--destination", help="Destination path for backup file")
+@click.pass_context
+def backup(ctx, name: str, destination: Optional[str]):
+ """Backup a wallet"""
+ wallet_dir = ctx.obj["wallet_dir"]
+ wallet_path = wallet_dir / f"{name}.json"
+
+ if not wallet_path.exists():
+ error(f"Wallet '{name}' does not exist")
+ return
+
+ if not destination:
+ timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+ destination = f"{name}_backup_{timestamp}.json"
+
+ # Copy wallet file
+ shutil.copy2(wallet_path, destination)
+ success(f"Wallet '{name}' backed up to '{destination}'")
+ output(
+ {
+ "wallet": name,
+ "backup_path": destination,
+ "timestamp": datetime.utcnow().isoformat() + "Z",
+ }
+ )
+
+
+@wallet.command()
+@click.argument("backup_path")
+@click.argument("name")
+@click.option("--force", is_flag=True, help="Override existing wallet")
+@click.pass_context
+def restore(ctx, backup_path: str, name: str, force: bool):
+ """Restore a wallet from backup"""
+ wallet_dir = ctx.obj["wallet_dir"]
+ wallet_path = wallet_dir / f"{name}.json"
+
+ if wallet_path.exists() and not force:
+ error(f"Wallet '{name}' already exists. Use --force to override.")
+ return
+
+ if not Path(backup_path).exists():
+ error(f"Backup file '{backup_path}' not found")
+ return
+
+ # Load and verify backup
+ with open(backup_path, "r") as f:
+ wallet_data = json.load(f)
+
+ # Update wallet name if needed
+ wallet_data["wallet_id"] = name
+ wallet_data["restored_at"] = datetime.utcnow().isoformat() + "Z"
+
+ # Save restored wallet (preserve encryption state)
+ # If wallet was encrypted, we save it as-is (still encrypted with original password)
+ with open(wallet_path, "w") as f:
+ json.dump(wallet_data, f, indent=2)
+
+ success(f"Wallet '{name}' restored from backup")
+ output(
+ {
+ "wallet": name,
+ "restored_from": backup_path,
+ "address": wallet_data["address"],
+ }
+ )
+
+
+@wallet.command()
+@click.pass_context
+def info(ctx):
+ """Show current wallet information"""
+ wallet_name = ctx.obj["wallet_name"]
+ wallet_path = ctx.obj["wallet_path"]
+ config_file = Path.home() / ".aitbc" / "config.yaml"
+
+ if not wallet_path.exists():
+ error(
+ f"Wallet '{wallet_name}' not found. Use 'aitbc wallet create' to create one."
+ )
+ return
+
+ wallet_data = _load_wallet(wallet_path, wallet_name)
+
+ # Get active wallet from config
+ active_wallet = "default"
+ if config_file.exists():
+ import yaml
+
+ with open(config_file, "r") as f:
+ config = yaml.safe_load(f)
+ active_wallet = config.get("active_wallet", "default")
+
+ wallet_info = {
+ "name": wallet_data.get("name", wallet_name),
+ "type": wallet_data.get("type", wallet_data.get("wallet_type", "simple")),
+ "address": wallet_data["address"],
+ "public_key": wallet_data.get("public_key", "N/A"),
+ "created_at": wallet_data["created_at"],
+ "active": wallet_data.get("name", wallet_name) == active_wallet,
+ "path": str(wallet_path),
+ }
+
+ if "balance" in wallet_data:
+ wallet_info["balance"] = wallet_data["balance"]
+
+ output(wallet_info, ctx.obj.get("output_format", "table"))
+
+
+@wallet.command()
+@click.pass_context
+def balance(ctx):
+ """Check wallet balance"""
+ wallet_name = ctx.obj["wallet_name"]
+ wallet_path = ctx.obj["wallet_path"]
+ config = ctx.obj.get("config")
+
+ # Auto-create wallet if it doesn't exist
+ if not wallet_path.exists():
+ import secrets
+ from cryptography.hazmat.primitives import hashes
+ from cryptography.hazmat.primitives.asymmetric import ec
+ from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat
+
+ # Generate proper key pair
+ private_key_bytes = secrets.token_bytes(32)
+ private_key = f"0x{private_key_bytes.hex()}"
+
+ # Derive public key from private key
+ priv_key = ec.derive_private_key(
+ int.from_bytes(private_key_bytes, "big"), ec.SECP256K1()
+ )
+ pub_key = priv_key.public_key()
+ pub_key_bytes = pub_key.public_bytes(
+ encoding=Encoding.X962, format=PublicFormat.UncompressedPoint
+ )
+ public_key = f"0x{pub_key_bytes.hex()}"
+
+ # Generate address from public key
+ digest = hashes.Hash(hashes.SHA256())
+ digest.update(pub_key_bytes)
+ address_hash = digest.finalize()
+ address = f"aitbc1{address_hash[:20].hex()}"
+
+ wallet_data = {
+ "wallet_id": wallet_name,
+ "type": "simple",
+ "address": address,
+ "public_key": public_key,
+ "private_key": private_key,
+ "created_at": datetime.utcnow().isoformat() + "Z",
+ "balance": 0.0,
+ "transactions": [],
+ }
+ wallet_path.parent.mkdir(parents=True, exist_ok=True)
+ # Auto-create with encryption
+ success("Creating new wallet with encryption enabled")
+ password = _get_wallet_password(wallet_name)
+ _save_wallet(wallet_path, wallet_data, password)
+ else:
+ wallet_data = _load_wallet(wallet_path, wallet_name)
+
+ # Try to get balance from blockchain if available
+ if config:
+ try:
+ with httpx.Client() as client:
+ # Try multiple balance query methods
+ blockchain_balance = None
+
+ # Method 1: Try direct balance endpoint
+ try:
+ response = client.get(
+ f"{config.get('coordinator_url').rstrip('/')}/rpc/getBalance/{wallet_data['address']}?chain_id=ait-devnet",
+ timeout=5,
+ )
+ if response.status_code == 200:
+ result = response.json()
+ blockchain_balance = result.get("balance", 0)
+ except Exception:
+ pass
+
+ # Method 2: Try addresses list endpoint
+ if blockchain_balance is None:
+ try:
+ response = client.get(
+ f"{config.get('coordinator_url').rstrip('/')}/rpc/addresses?chain_id=ait-devnet",
+ timeout=5,
+ )
+ if response.status_code == 200:
+ addresses = response.json()
+ if isinstance(addresses, list):
+ for addr_info in addresses:
+ if addr_info.get("address") == wallet_data["address"]:
+ blockchain_balance = addr_info.get("balance", 0)
+ break
+ except Exception:
+ pass
+
+ # Method 3: Use faucet as balance check (last resort)
+ if blockchain_balance is None:
+ try:
+ response = client.post(
+ f"{config.get('coordinator_url').rstrip('/')}/rpc/admin/mintFaucet?chain_id=ait-devnet",
+ json={"address": wallet_data["address"], "amount": 1},
+ timeout=5,
+ )
+ if response.status_code == 200:
+ result = response.json()
+ blockchain_balance = result.get("balance", 0)
+ # Subtract the 1 we just added
+ if blockchain_balance > 0:
+ blockchain_balance -= 1
+ except Exception:
+ pass
+
+ # If we got a blockchain balance, show it
+ if blockchain_balance is not None:
+ output(
+ {
+ "wallet": wallet_name,
+ "address": wallet_data["address"],
+ "local_balance": wallet_data.get("balance", 0),
+ "blockchain_balance": blockchain_balance,
+ "synced": wallet_data.get("balance", 0) == blockchain_balance,
+ "note": "Blockchain balance synced" if wallet_data.get("balance", 0) == blockchain_balance else "Local and blockchain balances differ",
+ },
+ ctx.obj.get("output_format", "table"),
+ )
+ return
+ except Exception:
+ pass
+
+ # Fallback to local balance only
+ output(
+ {
+ "wallet": wallet_name,
+ "address": wallet_data["address"],
+ "balance": wallet_data.get("balance", 0),
+ "note": "Local balance (blockchain balance queries unavailable)",
+ },
+ ctx.obj.get("output_format", "table"),
+ )
+
+
+@wallet.command()
+@click.option("--limit", type=int, default=10, help="Number of transactions to show")
+@click.pass_context
+def history(ctx, limit: int):
+ """Show transaction history"""
+ wallet_name = ctx.obj["wallet_name"]
+ wallet_path = ctx.obj["wallet_path"]
+
+ if not wallet_path.exists():
+ error(f"Wallet '{wallet_name}' not found")
+ return
+
+ wallet_data = _load_wallet(wallet_path, wallet_name)
+
+ transactions = wallet_data.get("transactions", [])[-limit:]
+
+ # Format transactions
+ formatted_txs = []
+ for tx in transactions:
+ formatted_txs.append(
+ {
+ "type": tx["type"],
+ "amount": tx["amount"],
+ "description": tx.get("description", ""),
+ "timestamp": tx["timestamp"],
+ }
+ )
+
+ output(
+ {
+ "wallet": wallet_name,
+ "address": wallet_data["address"],
+ "transactions": formatted_txs,
+ },
+ ctx.obj.get("output_format", "table"),
+ )
+
+
+@wallet.command()
+@click.argument("amount", type=float)
+@click.argument("job_id")
+@click.option("--desc", help="Description of the work")
+@click.pass_context
+def earn(ctx, amount: float, job_id: str, desc: Optional[str]):
+ """Add earnings from completed job"""
+ wallet_name = ctx.obj["wallet_name"]
+ wallet_path = ctx.obj["wallet_path"]
+
+ if not wallet_path.exists():
+ error(f"Wallet '{wallet_name}' not found")
+ return
+
+ wallet_data = _load_wallet(wallet_path, wallet_name)
+
+ # Add transaction
+ transaction = {
+ "type": "earn",
+ "amount": amount,
+ "job_id": job_id,
+ "description": desc or f"Job {job_id}",
+ "timestamp": datetime.now().isoformat(),
+ }
+
+ wallet_data["transactions"].append(transaction)
+ wallet_data["balance"] = wallet_data.get("balance", 0) + amount
+
+ # Save wallet with encryption
+ password = None
+ if wallet_data.get("encrypted"):
+ password = _get_wallet_password(wallet_name)
+ _save_wallet(wallet_path, wallet_data, password)
+
+ success(f"Earnings added: {amount} AITBC")
+ output(
+ {
+ "wallet": wallet_name,
+ "amount": amount,
+ "job_id": job_id,
+ "new_balance": wallet_data["balance"],
+ },
+ ctx.obj.get("output_format", "table"),
+ )
+
+
+@wallet.command()
+@click.argument("amount", type=float)
+@click.argument("description")
+@click.pass_context
+def spend(ctx, amount: float, description: str):
+ """Spend AITBC"""
+ wallet_name = ctx.obj["wallet_name"]
+ wallet_path = ctx.obj["wallet_path"]
+
+ if not wallet_path.exists():
+ error(f"Wallet '{wallet_name}' not found")
+ return
+
+ wallet_data = _load_wallet(wallet_path, wallet_name)
+
+ balance = wallet_data.get("balance", 0)
+ if balance < amount:
+ error(f"Insufficient balance. Available: {balance}, Required: {amount}")
+ ctx.exit(1)
+ return
+
+ # Add transaction
+ transaction = {
+ "type": "spend",
+ "amount": -amount,
+ "description": description,
+ "timestamp": datetime.now().isoformat(),
+ }
+
+ wallet_data["transactions"].append(transaction)
+ wallet_data["balance"] = balance - amount
+
+ # Save wallet with encryption
+ password = None
+ if wallet_data.get("encrypted"):
+ password = _get_wallet_password(wallet_name)
+ _save_wallet(wallet_path, wallet_data, password)
+
+ success(f"Spent: {amount} AITBC")
+ output(
+ {
+ "wallet": wallet_name,
+ "amount": amount,
+ "description": description,
+ "new_balance": wallet_data["balance"],
+ },
+ ctx.obj.get("output_format", "table"),
+ )
+
+
+@wallet.command()
+@click.pass_context
+def address(ctx):
+ """Show wallet address"""
+ wallet_name = ctx.obj["wallet_name"]
+ wallet_path = ctx.obj["wallet_path"]
+
+ if not wallet_path.exists():
+ error(f"Wallet '{wallet_name}' not found")
+ return
+
+ wallet_data = _load_wallet(wallet_path, wallet_name)
+
+ output(
+ {"wallet": wallet_name, "address": wallet_data["address"]},
+ ctx.obj.get("output_format", "table"),
+ )
+
+
+@wallet.command()
+@click.argument("to_address")
+@click.argument("amount", type=float)
+@click.option("--description", help="Transaction description")
+@click.pass_context
+def send(ctx, to_address: str, amount: float, description: Optional[str]):
+ """Send AITBC to another address"""
+ adapter = ctx.obj["wallet_adapter"]
+ use_daemon = ctx.obj["use_daemon"]
+ wallet_name = ctx.obj["wallet_name"]
+
+ # Check if using daemon mode and daemon is available
+ if use_daemon and not adapter.is_daemon_available():
+ error("Wallet daemon is not available. Falling back to file-based wallet send.")
+ # Switch to file mode
+ from ..config import get_config
+ from ..dual_mode_wallet_adapter import DualModeWalletAdapter
+ config = get_config()
+ adapter = DualModeWalletAdapter(config, use_daemon=False)
+ ctx.obj["wallet_adapter"] = adapter
+
+ # Get password for transaction
+ password = getpass.getpass(f"Enter password for wallet '{wallet_name}': ")
+
+ try:
+ result = adapter.send_transaction(wallet_name, password, to_address, amount, description)
+
+ # Display results
+ output(result, ctx.obj.get("output_format", "table"))
+
+ # Update active wallet if successful
+ if result:
+ success(f"Transaction sent successfully")
+
+ except Exception as e:
+ error(f"Failed to send transaction: {str(e)}")
+ return
+
+
+@wallet.command()
+@click.pass_context
+def balance(ctx):
+ """Check wallet balance"""
+ adapter = ctx.obj["wallet_adapter"]
+ use_daemon = ctx.obj["use_daemon"]
+ wallet_name = ctx.obj["wallet_name"]
+
+ # Check if using daemon mode and daemon is available
+ if use_daemon and not adapter.is_daemon_available():
+ error("Wallet daemon is not available. Falling back to file-based wallet balance.")
+ # Switch to file mode
+ from ..config import get_config
+ from ..dual_mode_wallet_adapter import DualModeWalletAdapter
+ config = get_config()
+ adapter = DualModeWalletAdapter(config, use_daemon=False)
+ ctx.obj["wallet_adapter"] = adapter
+
+ try:
+ balance = adapter.get_wallet_balance(wallet_name)
+ wallet_info = adapter.get_wallet_info(wallet_name)
+
+ if balance is None:
+ error(f"Wallet '{wallet_name}' not found")
+ return
+
+ output_data = {
+ "wallet_name": wallet_name,
+ "balance": balance,
+ "address": wallet_info.get("address") if wallet_info else None,
+ "mode": "daemon" if use_daemon else "file"
+ }
+
+ output(output_data, ctx.obj.get("output_format", "table"))
+
+ except Exception as e:
+ error(f"Failed to get wallet balance: {str(e)}")
+
+
+@wallet.group()
+def daemon():
+ """Wallet daemon management commands"""
+ pass
+
+
+@daemon.command()
+@click.pass_context
+def status(ctx):
+ """Check wallet daemon status"""
+ from ..config import get_config
+ from ..wallet_daemon_client import WalletDaemonClient
+
+ config = get_config()
+ client = WalletDaemonClient(config)
+
+ if client.is_available():
+ status_info = client.get_status()
+ success("Wallet daemon is available")
+ output(status_info, ctx.obj.get("output_format", "table"))
+ else:
+ error("Wallet daemon is not available")
+ output({
+ "status": "unavailable",
+ "wallet_url": config.wallet_url,
+ "suggestion": "Start the wallet daemon or check the configuration"
+ }, ctx.obj.get("output_format", "table"))
+
+
+@daemon.command()
+@click.pass_context
+def configure(ctx):
+ """Configure wallet daemon settings"""
+ from ..config import get_config
+
+ config = get_config()
+
+ output({
+ "wallet_url": config.wallet_url,
+ "timeout": getattr(config, 'timeout', 30),
+ "suggestion": "Use AITBC_WALLET_URL environment variable or config file to change settings"
+ }, ctx.obj.get("output_format", "table"))
+
+
+@wallet.command()
+@click.argument("wallet_name")
+@click.option("--password", help="Wallet password")
+@click.option("--new-password", help="New password for daemon wallet")
+@click.option("--force", is_flag=True, help="Force migration even if wallet exists")
+@click.pass_context
+def migrate_to_daemon(ctx, wallet_name: str, password: Optional[str], new_password: Optional[str], force: bool):
+ """Migrate a file-based wallet to daemon storage"""
+ from ..wallet_migration_service import WalletMigrationService
+ from ..config import get_config
+
+ config = get_config()
+ migration_service = WalletMigrationService(config)
+
+ if not migration_service.is_daemon_available():
+ error("Wallet daemon is not available")
+ return
+
+ try:
+ result = migration_service.migrate_to_daemon(wallet_name, password, new_password, force)
+ success(f"Migrated wallet '{wallet_name}' to daemon")
+ output(result, ctx.obj.get("output_format", "table"))
+
+ except Exception as e:
+ error(f"Failed to migrate wallet: {str(e)}")
+
+
+@wallet.command()
+@click.argument("wallet_name")
+@click.option("--password", help="Wallet password")
+@click.option("--new-password", help="New password for file wallet")
+@click.option("--force", is_flag=True, help="Force migration even if wallet exists")
+@click.pass_context
+def migrate_to_file(ctx, wallet_name: str, password: Optional[str], new_password: Optional[str], force: bool):
+ """Migrate a daemon-based wallet to file storage"""
+ from ..wallet_migration_service import WalletMigrationService
+ from ..config import get_config
+
+ config = get_config()
+ migration_service = WalletMigrationService(config)
+
+ if not migration_service.is_daemon_available():
+ error("Wallet daemon is not available")
+ return
+
+ try:
+ result = migration_service.migrate_to_file(wallet_name, password, new_password, force)
+ success(f"Migrated wallet '{wallet_name}' to file storage")
+ output(result, ctx.obj.get("output_format", "table"))
+
+ except Exception as e:
+ error(f"Failed to migrate wallet: {str(e)}")
+
+
+@wallet.command()
+@click.pass_context
+def migration_status(ctx):
+ """Show wallet migration status"""
+ from ..wallet_migration_service import WalletMigrationService
+ from ..config import get_config
+
+ config = get_config()
+ migration_service = WalletMigrationService(config)
+
+ try:
+ status = migration_service.get_migration_status()
+ output(status, ctx.obj.get("output_format", "table"))
+
+ except Exception as e:
+ error(f"Failed to get migration status: {str(e)}")
+def stats(ctx):
+ """Show wallet statistics"""
+ wallet_name = ctx.obj["wallet_name"]
+ wallet_path = ctx.obj["wallet_path"]
+
+ if not wallet_path.exists():
+ error(f"Wallet '{wallet_name}' not found")
+ return
+
+ wallet_data = _load_wallet(wallet_path, wallet_name)
+
+ transactions = wallet_data.get("transactions", [])
+
+ # Calculate stats
+ total_earned = sum(
+ tx["amount"] for tx in transactions if tx["type"] == "earn" and tx["amount"] > 0
+ )
+ total_spent = sum(
+ abs(tx["amount"])
+ for tx in transactions
+ if tx["type"] in ["spend", "send"] and tx["amount"] < 0
+ )
+ jobs_completed = len([tx for tx in transactions if tx["type"] == "earn"])
+
+ output(
+ {
+ "wallet": wallet_name,
+ "address": wallet_data["address"],
+ "current_balance": wallet_data.get("balance", 0),
+ "total_earned": total_earned,
+ "total_spent": total_spent,
+ "jobs_completed": jobs_completed,
+ "transaction_count": len(transactions),
+ "wallet_created": wallet_data.get("created_at"),
+ },
+ ctx.obj.get("output_format", "table"),
+ )
+
+
+@wallet.command()
+@click.argument("amount", type=float)
+@click.option("--duration", type=int, default=30, help="Staking duration in days")
+@click.pass_context
+def stake(ctx, amount: float, duration: int):
+ """Stake AITBC tokens"""
+ wallet_name = ctx.obj["wallet_name"]
+ wallet_path = ctx.obj["wallet_path"]
+
+ if not wallet_path.exists():
+ error(f"Wallet '{wallet_name}' not found")
+ return
+
+ wallet_data = _load_wallet(wallet_path, wallet_name)
+
+ balance = wallet_data.get("balance", 0)
+ if balance < amount:
+ error(f"Insufficient balance. Available: {balance}, Required: {amount}")
+ ctx.exit(1)
+ return
+
+ # Record stake
+ stake_id = f"stake_{int(datetime.now().timestamp())}"
+ stake_record = {
+ "stake_id": stake_id,
+ "amount": amount,
+ "duration_days": duration,
+ "start_date": datetime.now().isoformat(),
+ "end_date": (datetime.now() + timedelta(days=duration)).isoformat(),
+ "status": "active",
+ "apy": 5.0 + (duration / 30) * 1.5, # Higher APY for longer stakes
+ }
+
+ staking = wallet_data.setdefault("staking", [])
+ staking.append(stake_record)
+ wallet_data["balance"] = balance - amount
+
+ # Add transaction
+ wallet_data["transactions"].append(
+ {
+ "type": "stake",
+ "amount": -amount,
+ "stake_id": stake_id,
+ "description": f"Staked {amount} AITBC for {duration} days",
+ "timestamp": datetime.now().isoformat(),
+ }
+ )
+
+ # Save wallet with encryption
+ password = None
+ if wallet_data.get("encrypted"):
+ password = _get_wallet_password(wallet_name)
+ _save_wallet(wallet_path, wallet_data, password)
+
+ success(f"Staked {amount} AITBC for {duration} days")
+ output(
+ {
+ "wallet": wallet_name,
+ "stake_id": stake_id,
+ "amount": amount,
+ "duration_days": duration,
+ "apy": stake_record["apy"],
+ "new_balance": wallet_data["balance"],
+ },
+ ctx.obj.get("output_format", "table"),
+ )
+
+
+@wallet.command()
+@click.argument("stake_id")
+@click.pass_context
+def unstake(ctx, stake_id: str):
+ """Unstake AITBC tokens"""
+ wallet_name = ctx.obj["wallet_name"]
+ wallet_path = ctx.obj["wallet_path"]
+
+ if not wallet_path.exists():
+ error(f"Wallet '{wallet_name}' not found")
+ return
+
+ wallet_data = _load_wallet(wallet_path, wallet_name)
+
+ staking = wallet_data.get("staking", [])
+ stake_record = next(
+ (s for s in staking if s["stake_id"] == stake_id and s["status"] == "active"),
+ None,
+ )
+
+ if not stake_record:
+ error(f"Active stake '{stake_id}' not found")
+ ctx.exit(1)
+ return
+
+ # Calculate rewards
+ start = datetime.fromisoformat(stake_record["start_date"])
+ days_staked = max(1, (datetime.now() - start).days)
+ daily_rate = stake_record["apy"] / 100 / 365
+ rewards = stake_record["amount"] * daily_rate * days_staked
+
+ # Return principal + rewards
+ returned = stake_record["amount"] + rewards
+ wallet_data["balance"] = wallet_data.get("balance", 0) + returned
+ stake_record["status"] = "completed"
+ stake_record["rewards"] = rewards
+ stake_record["completed_date"] = datetime.now().isoformat()
+
+ # Add transaction
+ wallet_data["transactions"].append(
+ {
+ "type": "unstake",
+ "amount": returned,
+ "stake_id": stake_id,
+ "rewards": rewards,
+ "description": f"Unstaked {stake_record['amount']} AITBC + {rewards:.4f} rewards",
+ "timestamp": datetime.now().isoformat(),
+ }
+ )
+
+ # Save wallet with encryption
+ password = None
+ if wallet_data.get("encrypted"):
+ password = _get_wallet_password(wallet_name)
+ _save_wallet(wallet_path, wallet_data, password)
+
+ success(f"Unstaked {stake_record['amount']} AITBC + {rewards:.4f} rewards")
+ output(
+ {
+ "wallet": wallet_name,
+ "stake_id": stake_id,
+ "principal": stake_record["amount"],
+ "rewards": rewards,
+ "total_returned": returned,
+ "days_staked": days_staked,
+ "new_balance": wallet_data["balance"],
+ },
+ ctx.obj.get("output_format", "table"),
+ )
+
+
+@wallet.command(name="staking-info")
+@click.pass_context
+def staking_info(ctx):
+ """Show staking information"""
+ wallet_name = ctx.obj["wallet_name"]
+ wallet_path = ctx.obj["wallet_path"]
+
+ if not wallet_path.exists():
+ error(f"Wallet '{wallet_name}' not found")
+ return
+
+ wallet_data = _load_wallet(wallet_path, wallet_name)
+
+ staking = wallet_data.get("staking", [])
+ active_stakes = [s for s in staking if s["status"] == "active"]
+ completed_stakes = [s for s in staking if s["status"] == "completed"]
+
+ total_staked = sum(s["amount"] for s in active_stakes)
+ total_rewards = sum(s.get("rewards", 0) for s in completed_stakes)
+
+ output(
+ {
+ "wallet": wallet_name,
+ "total_staked": total_staked,
+ "total_rewards_earned": total_rewards,
+ "active_stakes": len(active_stakes),
+ "completed_stakes": len(completed_stakes),
+ "stakes": [
+ {
+ "stake_id": s["stake_id"],
+ "amount": s["amount"],
+ "apy": s["apy"],
+ "duration_days": s["duration_days"],
+ "status": s["status"],
+ "start_date": s["start_date"],
+ }
+ for s in staking
+ ],
+ },
+ ctx.obj.get("output_format", "table"),
+ )
+
+
+@wallet.command(name="multisig-create")
+@click.argument("signers", nargs=-1, required=True)
+@click.option(
+ "--threshold", type=int, required=True, help="Required signatures to approve"
+)
+@click.option("--name", required=True, help="Multisig wallet name")
+@click.pass_context
+def multisig_create(ctx, signers: tuple, threshold: int, name: str):
+ """Create a multi-signature wallet"""
+ wallet_dir = ctx.obj.get("wallet_dir", Path.home() / ".aitbc" / "wallets")
+ wallet_dir.mkdir(parents=True, exist_ok=True)
+ multisig_path = wallet_dir / f"{name}_multisig.json"
+
+ if multisig_path.exists():
+ error(f"Multisig wallet '{name}' already exists")
+ return
+
+ if threshold > len(signers):
+ error(
+ f"Threshold ({threshold}) cannot exceed number of signers ({len(signers)})"
+ )
+ return
+
+ import secrets
+
+ multisig_data = {
+ "wallet_id": name,
+ "type": "multisig",
+ "address": f"aitbc1ms{secrets.token_hex(18)}",
+ "signers": list(signers),
+ "threshold": threshold,
+ "created_at": datetime.now().isoformat(),
+ "balance": 0.0,
+ "transactions": [],
+ "pending_transactions": [],
+ }
+
+ with open(multisig_path, "w") as f:
+ json.dump(multisig_data, f, indent=2)
+
+ success(f"Multisig wallet '{name}' created ({threshold}-of-{len(signers)})")
+ output(
+ {
+ "name": name,
+ "address": multisig_data["address"],
+ "signers": list(signers),
+ "threshold": threshold,
+ },
+ ctx.obj.get("output_format", "table"),
+ )
+
+
+@wallet.command(name="multisig-propose")
+@click.option("--wallet", "wallet_name", required=True, help="Multisig wallet name")
+@click.argument("to_address")
+@click.argument("amount", type=float)
+@click.option("--description", help="Transaction description")
+@click.pass_context
+def multisig_propose(
+ ctx, wallet_name: str, to_address: str, amount: float, description: Optional[str]
+):
+ """Propose a multisig transaction"""
+ wallet_dir = ctx.obj.get("wallet_dir", Path.home() / ".aitbc" / "wallets")
+ multisig_path = wallet_dir / f"{wallet_name}_multisig.json"
+
+ if not multisig_path.exists():
+ error(f"Multisig wallet '{wallet_name}' not found")
+ return
+
+ with open(multisig_path) as f:
+ ms_data = json.load(f)
+
+ if ms_data.get("balance", 0) < amount:
+ error(
+ f"Insufficient balance. Available: {ms_data['balance']}, Required: {amount}"
+ )
+ ctx.exit(1)
+ return
+
+ import secrets
+
+ tx_id = f"mstx_{secrets.token_hex(8)}"
+ pending_tx = {
+ "tx_id": tx_id,
+ "to": to_address,
+ "amount": amount,
+ "description": description or "",
+ "proposed_at": datetime.now().isoformat(),
+ "proposed_by": os.environ.get("USER", "unknown"),
+ "signatures": [],
+ "status": "pending",
+ }
+
+ ms_data.setdefault("pending_transactions", []).append(pending_tx)
+ with open(multisig_path, "w") as f:
+ json.dump(ms_data, f, indent=2)
+
+ success(f"Transaction proposed: {tx_id}")
+ output(
+ {
+ "tx_id": tx_id,
+ "to": to_address,
+ "amount": amount,
+ "signatures_needed": ms_data["threshold"],
+ "status": "pending",
+ },
+ ctx.obj.get("output_format", "table"),
+ )
+
+
+@wallet.command(name="multisig-challenge")
+@click.option("--wallet", "wallet_name", required=True, help="Multisig wallet name")
+@click.argument("tx_id")
+@click.pass_context
+def multisig_challenge(ctx, wallet_name: str, tx_id: str):
+ """Create a cryptographic challenge for multisig transaction signing"""
+ wallet_dir = ctx.obj.get("wallet_dir", Path.home() / ".aitbc" / "wallets")
+ multisig_path = wallet_dir / f"{wallet_name}_multisig.json"
+
+ if not multisig_path.exists():
+ error(f"Multisig wallet '{wallet_name}' not found")
+ return
+
+ with open(multisig_path) as f:
+ ms_data = json.load(f)
+
+ # Find pending transaction
+ pending = ms_data.get("pending_transactions", [])
+ tx = next(
+ (t for t in pending if t["tx_id"] == tx_id and t["status"] == "pending"), None
+ )
+
+ if not tx:
+ error(f"Pending transaction '{tx_id}' not found")
+ return
+
+ # Import crypto utilities
+ from ..utils.crypto_utils import multisig_security
+
+ try:
+ # Create signing request
+ signing_request = multisig_security.create_signing_request(tx, wallet_name)
+
+ output({
+ "tx_id": tx_id,
+ "wallet": wallet_name,
+ "challenge": signing_request["challenge"],
+ "nonce": signing_request["nonce"],
+ "message": signing_request["message"],
+ "instructions": [
+ "1. Copy the challenge string above",
+ "2. Sign it with your private key using: aitbc wallet sign-challenge ",
+ "3. Use the returned signature with: aitbc wallet multisig-sign --wallet --signer --signature "
+ ]
+ }, ctx.obj.get("output_format", "table"))
+
+ except Exception as e:
+ error(f"Failed to create challenge: {e}")
+
+
+@wallet.command(name="sign-challenge")
+@click.argument("challenge")
+@click.argument("private_key")
+@click.pass_context
+def sign_challenge(ctx, challenge: str, private_key: str):
+ """Sign a cryptographic challenge (for testing multisig)"""
+ from ..utils.crypto_utils import sign_challenge
+
+ try:
+ signature = sign_challenge(challenge, private_key)
+
+ output({
+ "challenge": challenge,
+ "signature": signature,
+ "message": "Use this signature with multisig-sign command"
+ }, ctx.obj.get("output_format", "table"))
+
+ except Exception as e:
+ error(f"Failed to sign challenge: {e}")
+
+
+@wallet.command(name="multisig-sign")
+@click.option("--wallet", "wallet_name", required=True, help="Multisig wallet name")
+@click.argument("tx_id")
+@click.option("--signer", required=True, help="Signer address")
+@click.option("--signature", required=True, help="Cryptographic signature (hex)")
+@click.pass_context
+def multisig_sign(ctx, wallet_name: str, tx_id: str, signer: str, signature: str):
+ """Sign a pending multisig transaction with cryptographic verification"""
+ wallet_dir = ctx.obj.get("wallet_dir", Path.home() / ".aitbc" / "wallets")
+ multisig_path = wallet_dir / f"{wallet_name}_multisig.json"
+
+ if not multisig_path.exists():
+ error(f"Multisig wallet '{wallet_name}' not found")
+ return
+
+ with open(multisig_path) as f:
+ ms_data = json.load(f)
+
+ if signer not in ms_data.get("signers", []):
+ error(f"'{signer}' is not an authorized signer")
+ ctx.exit(1)
+ return
+
+ # Import crypto utilities
+ from ..utils.crypto_utils import multisig_security
+
+ # Verify signature cryptographically
+ success, message = multisig_security.verify_and_add_signature(tx_id, signature, signer)
+ if not success:
+ error(f"Signature verification failed: {message}")
+ ctx.exit(1)
+ return
+
+ pending = ms_data.get("pending_transactions", [])
+ tx = next(
+ (t for t in pending if t["tx_id"] == tx_id and t["status"] == "pending"), None
+ )
+
+ if not tx:
+ error(f"Pending transaction '{tx_id}' not found")
+ ctx.exit(1)
+ return
+
+ # Check if already signed
+ for sig in tx.get("signatures", []):
+ if sig["signer"] == signer:
+ error(f"'{signer}' has already signed this transaction")
+ return
+
+ # Add cryptographic signature
+ if "signatures" not in tx:
+ tx["signatures"] = []
+
+ tx["signatures"].append({
+ "signer": signer,
+ "signature": signature,
+ "timestamp": datetime.now().isoformat()
+ })
+
+ # Check if threshold met
+ if len(tx["signatures"]) >= ms_data["threshold"]:
+ tx["status"] = "approved"
+ # Execute the transaction
+ ms_data["balance"] = ms_data.get("balance", 0) - tx["amount"]
+ ms_data["transactions"].append(
+ {
+ "type": "multisig_send",
+ "amount": -tx["amount"],
+ "to": tx["to"],
+ "tx_id": tx["tx_id"],
+ "signatures": tx["signatures"],
+ "timestamp": datetime.now().isoformat(),
+ }
+ )
+ success(f"Transaction {tx_id} approved and executed!")
+ else:
+ success(
+ f"Signed. {len(tx['signatures'])}/{ms_data['threshold']} signatures collected"
+ )
+
+ with open(multisig_path, "w") as f:
+ json.dump(ms_data, f, indent=2)
+
+ output(
+ {
+ "tx_id": tx_id,
+ "signatures": tx["signatures"],
+ "threshold": ms_data["threshold"],
+ "status": tx["status"],
+ },
+ ctx.obj.get("output_format", "table"),
+ )
+
+
+@wallet.command(name="liquidity-stake")
+@click.argument("amount", type=float)
+@click.option("--pool", default="main", help="Liquidity pool name")
+@click.option(
+ "--lock-days", type=int, default=0, help="Lock period in days (higher APY)"
+)
+@click.pass_context
+def liquidity_stake(ctx, amount: float, pool: str, lock_days: int):
+ """Stake tokens into a liquidity pool"""
+ wallet_name = ctx.obj["wallet_name"]
+ wallet_path = ctx.obj.get("wallet_path")
+ if not wallet_path or not Path(wallet_path).exists():
+ error("Wallet not found")
+ ctx.exit(1)
+ return
+
+ wallet_data = _load_wallet(Path(wallet_path), wallet_name)
+
+ balance = wallet_data.get("balance", 0)
+ if balance < amount:
+ error(f"Insufficient balance. Available: {balance}, Required: {amount}")
+ ctx.exit(1)
+ return
+
+ # APY tiers based on lock period
+ if lock_days >= 90:
+ apy = 12.0
+ tier = "platinum"
+ elif lock_days >= 30:
+ apy = 8.0
+ tier = "gold"
+ elif lock_days >= 7:
+ apy = 5.0
+ tier = "silver"
+ else:
+ apy = 3.0
+ tier = "bronze"
+
+ import secrets
+
+ stake_id = f"liq_{secrets.token_hex(6)}"
+ now = datetime.now()
+
+ liq_record = {
+ "stake_id": stake_id,
+ "pool": pool,
+ "amount": amount,
+ "apy": apy,
+ "tier": tier,
+ "lock_days": lock_days,
+ "start_date": now.isoformat(),
+ "unlock_date": (now + timedelta(days=lock_days)).isoformat()
+ if lock_days > 0
+ else None,
+ "status": "active",
+ }
+
+ wallet_data.setdefault("liquidity", []).append(liq_record)
+ wallet_data["balance"] = balance - amount
+
+ wallet_data["transactions"].append(
+ {
+ "type": "liquidity_stake",
+ "amount": -amount,
+ "pool": pool,
+ "stake_id": stake_id,
+ "timestamp": now.isoformat(),
+ }
+ )
+
+ # Save wallet with encryption
+ password = None
+ if wallet_data.get("encrypted"):
+ password = _get_wallet_password(wallet_name)
+ _save_wallet(Path(wallet_path), wallet_data, password)
+
+ success(f"Staked {amount} AITBC into '{pool}' pool ({tier} tier, {apy}% APY)")
+ output(
+ {
+ "stake_id": stake_id,
+ "pool": pool,
+ "amount": amount,
+ "apy": apy,
+ "tier": tier,
+ "lock_days": lock_days,
+ "new_balance": wallet_data["balance"],
+ },
+ ctx.obj.get("output_format", "table"),
+ )
+
+
+@wallet.command(name="liquidity-unstake")
+@click.argument("stake_id")
+@click.pass_context
+def liquidity_unstake(ctx, stake_id: str):
+ """Withdraw from a liquidity pool with rewards"""
+ wallet_name = ctx.obj["wallet_name"]
+ wallet_path = ctx.obj.get("wallet_path")
+ if not wallet_path or not Path(wallet_path).exists():
+ error("Wallet not found")
+ ctx.exit(1)
+ return
+
+ wallet_data = _load_wallet(Path(wallet_path), wallet_name)
+
+ liquidity = wallet_data.get("liquidity", [])
+ record = next(
+ (r for r in liquidity if r["stake_id"] == stake_id and r["status"] == "active"),
+ None,
+ )
+
+ if not record:
+ error(f"Active liquidity stake '{stake_id}' not found")
+ ctx.exit(1)
+ return
+
+ # Check lock period
+ if record.get("unlock_date"):
+ unlock = datetime.fromisoformat(record["unlock_date"])
+ if datetime.now() < unlock:
+ error(f"Stake is locked until {record['unlock_date']}")
+ ctx.exit(1)
+ return
+
+ # Calculate rewards
+ start = datetime.fromisoformat(record["start_date"])
+ days_staked = max((datetime.now() - start).total_seconds() / 86400, 0.001)
+ rewards = record["amount"] * (record["apy"] / 100) * (days_staked / 365)
+ total = record["amount"] + rewards
+
+ record["status"] = "completed"
+ record["end_date"] = datetime.now().isoformat()
+ record["rewards"] = round(rewards, 6)
+
+ wallet_data["balance"] = wallet_data.get("balance", 0) + total
+
+ wallet_data["transactions"].append(
+ {
+ "type": "liquidity_unstake",
+ "amount": total,
+ "principal": record["amount"],
+ "rewards": round(rewards, 6),
+ "pool": record["pool"],
+ "stake_id": stake_id,
+ "timestamp": datetime.now().isoformat(),
+ }
+ )
+
+ # Save wallet with encryption
+ password = None
+ if wallet_data.get("encrypted"):
+ password = _get_wallet_password(wallet_name)
+ _save_wallet(Path(wallet_path), wallet_data, password)
+
+ success(
+ f"Withdrawn {total:.6f} AITBC (principal: {record['amount']}, rewards: {rewards:.6f})"
+ )
+ output(
+ {
+ "stake_id": stake_id,
+ "pool": record["pool"],
+ "principal": record["amount"],
+ "rewards": round(rewards, 6),
+ "total_returned": round(total, 6),
+ "days_staked": round(days_staked, 2),
+ "apy": record["apy"],
+ "new_balance": round(wallet_data["balance"], 6),
+ },
+ ctx.obj.get("output_format", "table"),
+ )
+
+
+@wallet.command()
+@click.pass_context
+def rewards(ctx):
+ """View all earned rewards (staking + liquidity)"""
+ wallet_name = ctx.obj["wallet_name"]
+ wallet_path = ctx.obj.get("wallet_path")
+ if not wallet_path or not Path(wallet_path).exists():
+ error("Wallet not found")
+ ctx.exit(1)
+ return
+
+ wallet_data = _load_wallet(Path(wallet_path), wallet_name)
+
+ staking = wallet_data.get("staking", [])
+ liquidity = wallet_data.get("liquidity", [])
+
+ # Staking rewards
+ staking_rewards = sum(
+ s.get("rewards", 0) for s in staking if s.get("status") == "completed"
+ )
+ active_staking = sum(s["amount"] for s in staking if s.get("status") == "active")
+
+ # Liquidity rewards
+ liq_rewards = sum(
+ r.get("rewards", 0) for r in liquidity if r.get("status") == "completed"
+ )
+ active_liquidity = sum(
+ r["amount"] for r in liquidity if r.get("status") == "active"
+ )
+
+ # Estimate pending rewards for active positions
+ pending_staking = 0
+ for s in staking:
+ if s.get("status") == "active":
+ start = datetime.fromisoformat(s["start_date"])
+ days = max((datetime.now() - start).total_seconds() / 86400, 0)
+ pending_staking += s["amount"] * (s["apy"] / 100) * (days / 365)
+
+ pending_liquidity = 0
+ for r in liquidity:
+ if r.get("status") == "active":
+ start = datetime.fromisoformat(r["start_date"])
+ days = max((datetime.now() - start).total_seconds() / 86400, 0)
+ pending_liquidity += r["amount"] * (r["apy"] / 100) * (days / 365)
+
+ output(
+ {
+ "staking_rewards_earned": round(staking_rewards, 6),
+ "staking_rewards_pending": round(pending_staking, 6),
+ "staking_active_amount": active_staking,
+ "liquidity_rewards_earned": round(liq_rewards, 6),
+ "liquidity_rewards_pending": round(pending_liquidity, 6),
+ "liquidity_active_amount": active_liquidity,
+ "total_earned": round(staking_rewards + liq_rewards, 6),
+ "total_pending": round(pending_staking + pending_liquidity, 6),
+ "total_staked": active_staking + active_liquidity,
+ },
+ ctx.obj.get("output_format", "table"),
+ )
+
+
+# Multi-Chain Commands
+
+@wallet.group()
+def chain():
+ """Multi-chain wallet operations"""
+ pass
+
+
+@chain.command()
+@click.pass_context
+def list(ctx):
+ """List all blockchain chains"""
+ adapter = ctx.obj["wallet_adapter"]
+ use_daemon = ctx.obj["use_daemon"]
+
+ if not use_daemon:
+ error("Chain operations require daemon mode. Use --use-daemon flag.")
+ return
+
+ if not adapter.is_daemon_available():
+ error("Wallet daemon is not available")
+ return
+
+ try:
+ chains = adapter.list_chains()
+ output({
+ "chains": chains,
+ "count": len(chains),
+ "mode": "daemon"
+ }, ctx.obj.get("output_format", "table"))
+
+ except Exception as e:
+ error(f"Failed to list chains: {str(e)}")
+
+
+@chain.command()
+@click.argument("chain_id")
+@click.argument("name")
+@click.argument("coordinator_url")
+@click.argument("coordinator_api_key")
+@click.pass_context
+def create(ctx, chain_id: str, name: str, coordinator_url: str, coordinator_api_key: str):
+ """Create a new blockchain chain"""
+ adapter = ctx.obj["wallet_adapter"]
+ use_daemon = ctx.obj["use_daemon"]
+
+ if not use_daemon:
+ error("Chain operations require daemon mode. Use --use-daemon flag.")
+ return
+
+ if not adapter.is_daemon_available():
+ error("Wallet daemon is not available")
+ return
+
+ try:
+ chain = adapter.create_chain(chain_id, name, coordinator_url, coordinator_api_key)
+ if chain:
+ success(f"Created chain: {chain_id}")
+ output(chain, ctx.obj.get("output_format", "table"))
+ else:
+ error(f"Failed to create chain: {chain_id}")
+
+ except Exception as e:
+ error(f"Failed to create chain: {str(e)}")
+
+
+@chain.command()
+@click.pass_context
+def status(ctx):
+ """Get chain status and statistics"""
+ adapter = ctx.obj["wallet_adapter"]
+ use_daemon = ctx.obj["use_daemon"]
+
+ if not use_daemon:
+ error("Chain operations require daemon mode. Use --use-daemon flag.")
+ return
+
+ if not adapter.is_daemon_available():
+ error("Wallet daemon is not available")
+ return
+
+ try:
+ status = adapter.get_chain_status()
+ output(status, ctx.obj.get("output_format", "table"))
+
+ except Exception as e:
+ error(f"Failed to get chain status: {str(e)}")
+
+
+@chain.command()
+@click.argument("chain_id")
+@click.pass_context
+def wallets(ctx, chain_id: str):
+ """List wallets in a specific chain"""
+ adapter = ctx.obj["wallet_adapter"]
+ use_daemon = ctx.obj["use_daemon"]
+
+ if not use_daemon:
+ error("Chain operations require daemon mode. Use --use-daemon flag.")
+ return
+
+ if not adapter.is_daemon_available():
+ error("Wallet daemon is not available")
+ return
+
+ try:
+ wallets = adapter.list_wallets_in_chain(chain_id)
+ output({
+ "chain_id": chain_id,
+ "wallets": wallets,
+ "count": len(wallets),
+ "mode": "daemon"
+ }, ctx.obj.get("output_format", "table"))
+
+ except Exception as e:
+ error(f"Failed to list wallets in chain {chain_id}: {str(e)}")
+
+
+@chain.command()
+@click.argument("chain_id")
+@click.argument("wallet_name")
+@click.pass_context
+def info(ctx, chain_id: str, wallet_name: str):
+ """Get wallet information from a specific chain"""
+ adapter = ctx.obj["wallet_adapter"]
+ use_daemon = ctx.obj["use_daemon"]
+
+ if not use_daemon:
+ error("Chain operations require daemon mode. Use --use-daemon flag.")
+ return
+
+ if not adapter.is_daemon_available():
+ error("Wallet daemon is not available")
+ return
+
+ try:
+ wallet_info = adapter.get_wallet_info_in_chain(chain_id, wallet_name)
+ if wallet_info:
+ output(wallet_info, ctx.obj.get("output_format", "table"))
+ else:
+ error(f"Wallet '{wallet_name}' not found in chain '{chain_id}'")
+
+ except Exception as e:
+ error(f"Failed to get wallet info: {str(e)}")
+
+
+@chain.command()
+@click.argument("chain_id")
+@click.argument("wallet_name")
+@click.pass_context
+def balance(ctx, chain_id: str, wallet_name: str):
+ """Get wallet balance in a specific chain"""
+ adapter = ctx.obj["wallet_adapter"]
+ use_daemon = ctx.obj["use_daemon"]
+
+ if not use_daemon:
+ error("Chain operations require daemon mode. Use --use-daemon flag.")
+ return
+
+ if not adapter.is_daemon_available():
+ error("Wallet daemon is not available")
+ return
+
+ try:
+ balance = adapter.get_wallet_balance_in_chain(chain_id, wallet_name)
+ if balance is not None:
+ output({
+ "chain_id": chain_id,
+ "wallet_name": wallet_name,
+ "balance": balance,
+ "mode": "daemon"
+ }, ctx.obj.get("output_format", "table"))
+ else:
+ error(f"Could not get balance for wallet '{wallet_name}' in chain '{chain_id}'")
+
+ except Exception as e:
+ error(f"Failed to get wallet balance: {str(e)}")
+
+
+@chain.command()
+@click.argument("source_chain_id")
+@click.argument("target_chain_id")
+@click.argument("wallet_name")
+@click.option("--new-password", help="New password for target chain wallet")
+@click.pass_context
+def migrate(ctx, source_chain_id: str, target_chain_id: str, wallet_name: str, new_password: Optional[str]):
+ """Migrate a wallet from one chain to another"""
+ adapter = ctx.obj["wallet_adapter"]
+ use_daemon = ctx.obj["use_daemon"]
+
+ if not use_daemon:
+ error("Chain operations require daemon mode. Use --use-daemon flag.")
+ return
+
+ if not adapter.is_daemon_available():
+ error("Wallet daemon is not available")
+ return
+
+ try:
+ # Get password
+ import getpass
+ password = getpass.getpass(f"Enter password for wallet '{wallet_name}': ")
+
+ result = adapter.migrate_wallet(source_chain_id, target_chain_id, wallet_name, password, new_password)
+ if result:
+ success(f"Migrated wallet '{wallet_name}' from '{source_chain_id}' to '{target_chain_id}'")
+ output(result, ctx.obj.get("output_format", "table"))
+ else:
+ error(f"Failed to migrate wallet '{wallet_name}'")
+
+ except Exception as e:
+ error(f"Failed to migrate wallet: {str(e)}")
+
+
+@wallet.command()
+@click.argument("chain_id")
+@click.argument("wallet_name")
+@click.option("--type", "wallet_type", default="hd", help="Wallet type (hd, simple)")
+@click.option("--no-encrypt", is_flag=True, help="Skip wallet encryption (not recommended)")
+@click.pass_context
+def create_in_chain(ctx, chain_id: str, wallet_name: str, wallet_type: str, no_encrypt: bool):
+ """Create a wallet in a specific chain"""
+ adapter = ctx.obj["wallet_adapter"]
+ use_daemon = ctx.obj["use_daemon"]
+
+ if not use_daemon:
+ error("Chain operations require daemon mode. Use --use-daemon flag.")
+ return
+
+ if not adapter.is_daemon_available():
+ error("Wallet daemon is not available")
+ return
+
+ try:
+ # Get password
+ import getpass
+ if not no_encrypt:
+ password = getpass.getpass(f"Enter password for wallet '{wallet_name}': ")
+ confirm_password = getpass.getpass(f"Confirm password for wallet '{wallet_name}': ")
+ if password != confirm_password:
+ error("Passwords do not match")
+ return
+ else:
+ password = "insecure" # Default password for unencrypted wallets
+
+ metadata = {
+ "wallet_type": wallet_type,
+ "encrypted": not no_encrypt,
+ "created_at": datetime.now().isoformat()
+ }
+
+ result = adapter.create_wallet_in_chain(chain_id, wallet_name, password, wallet_type, metadata)
+ if result:
+ success(f"Created wallet '{wallet_name}' in chain '{chain_id}'")
+ output(result, ctx.obj.get("output_format", "table"))
+ else:
+ error(f"Failed to create wallet '{wallet_name}' in chain '{chain_id}'")
+
+ except Exception as e:
+ error(f"Failed to create wallet in chain: {str(e)}")
+
+
+@wallet.command()
+@click.option("--threshold", type=int, required=True, help="Number of signatures required")
+@click.option("--signers", multiple=True, required=True, help="Public keys of signers")
+@click.option("--wallet-name", help="Name for the multi-sig wallet")
+@click.option("--chain-id", help="Chain ID for multi-chain support")
+@click.pass_context
+def multisig_create(ctx, threshold: int, signers: tuple, wallet_name: Optional[str], chain_id: Optional[str]):
+ """Create a multi-signature wallet"""
+ config = ctx.obj.get('config')
+
+ if len(signers) < threshold:
+ error(f"Threshold {threshold} cannot be greater than number of signers {len(signers)}")
+ return
+
+ multisig_data = {
+ "threshold": threshold,
+ "signers": list(signers),
+ "wallet_name": wallet_name or f"multisig_{int(datetime.now().timestamp())}",
+ "created_at": datetime.utcnow().isoformat()
+ }
+
+ if chain_id:
+ multisig_data["chain_id"] = chain_id
+
+ try:
+ if ctx.obj.get("use_daemon"):
+ # Use wallet daemon for multi-sig creation
+ from ..dual_mode_wallet_adapter import DualModeWalletAdapter
+ adapter = DualModeWalletAdapter(config)
+
+ result = adapter.create_multisig_wallet(
+ threshold=threshold,
+ signers=list(signers),
+ wallet_name=wallet_name,
+ chain_id=chain_id
+ )
+
+ if result:
+ success(f"Multi-sig wallet '{multisig_data['wallet_name']}' created!")
+ success(f"Threshold: {threshold}/{len(signers)}")
+ success(f"Signers: {len(signers)}")
+ output(result, ctx.obj.get('output_format', 'table'))
+ else:
+ error("Failed to create multi-sig wallet")
+ else:
+ # Local multi-sig wallet creation
+ wallet_dir = Path.home() / ".aitbc" / "wallets"
+ wallet_dir.mkdir(parents=True, exist_ok=True)
+
+ wallet_file = wallet_dir / f"{multisig_data['wallet_name']}.json"
+
+ if wallet_file.exists():
+ error(f"Wallet '{multisig_data['wallet_name']}' already exists")
+ return
+
+ # Save multi-sig wallet
+ with open(wallet_file, 'w') as f:
+ json.dump(multisig_data, f, indent=2)
+
+ success(f"Multi-sig wallet '{multisig_data['wallet_name']}' created!")
+ success(f"Threshold: {threshold}/{len(signers)}")
+ output(multisig_data, ctx.obj.get('output_format', 'table'))
+
+ except Exception as e:
+ error(f"Failed to create multi-sig wallet: {e}")
+
+
+@wallet.command()
+@click.option("--amount", type=float, required=True, help="Transfer limit amount")
+@click.option("--period", default="daily", help="Limit period (hourly, daily, weekly)")
+@click.option("--wallet-name", help="Wallet to set limit for")
+@click.pass_context
+def set_limit(ctx, amount: float, period: str, wallet_name: Optional[str]):
+ """Set transfer limits for wallet"""
+ config = ctx.obj.get('config')
+
+ limit_data = {
+ "amount": amount,
+ "period": period,
+ "set_at": datetime.utcnow().isoformat()
+ }
+
+ try:
+ if ctx.obj.get("use_daemon"):
+ # Use wallet daemon
+ from ..dual_mode_wallet_adapter import DualModeWalletAdapter
+ adapter = DualModeWalletAdapter(config)
+
+ result = adapter.set_transfer_limit(
+ amount=amount,
+ period=period,
+ wallet_name=wallet_name
+ )
+
+ if result:
+ success(f"Transfer limit set: {amount} {period}")
+ output(result, ctx.obj.get('output_format', 'table'))
+ else:
+ error("Failed to set transfer limit")
+ else:
+ # Local limit setting
+ limits_file = Path.home() / ".aitbc" / "transfer_limits.json"
+ limits_file.parent.mkdir(parents=True, exist_ok=True)
+
+ # Load existing limits
+ limits = {}
+ if limits_file.exists():
+ with open(limits_file, 'r') as f:
+ limits = json.load(f)
+
+ # Set new limit
+ wallet_key = wallet_name or "default"
+ limits[wallet_key] = limit_data
+
+ # Save limits
+ with open(limits_file, 'w') as f:
+ json.dump(limits, f, indent=2)
+
+ success(f"Transfer limit set for '{wallet_key}': {amount} {period}")
+ output(limit_data, ctx.obj.get('output_format', 'table'))
+
+ except Exception as e:
+ error(f"Failed to set transfer limit: {e}")
+
+
+@wallet.command()
+@click.option("--amount", type=float, required=True, help="Amount to time-lock")
+@click.option("--duration", type=int, required=True, help="Lock duration in hours")
+@click.option("--recipient", required=True, help="Recipient address")
+@click.option("--wallet-name", help="Wallet to create time-lock from")
+@click.pass_context
+def time_lock(ctx, amount: float, duration: int, recipient: str, wallet_name: Optional[str]):
+ """Create a time-locked transfer"""
+ config = ctx.obj.get('config')
+
+ lock_data = {
+ "amount": amount,
+ "duration_hours": duration,
+ "recipient": recipient,
+ "wallet_name": wallet_name or "default",
+ "created_at": datetime.utcnow().isoformat(),
+ "unlock_time": (datetime.utcnow() + timedelta(hours=duration)).isoformat()
+ }
+
+ try:
+ if ctx.obj.get("use_daemon"):
+ # Use wallet daemon
+ from ..dual_mode_wallet_adapter import DualModeWalletAdapter
+ adapter = DualModeWalletAdapter(config)
+
+ result = adapter.create_time_lock(
+ amount=amount,
+ duration_hours=duration,
+ recipient=recipient,
+ wallet_name=wallet_name
+ )
+
+ if result:
+ success(f"Time-locked transfer created: {amount} tokens")
+ success(f"Unlocks in: {duration} hours")
+ success(f"Recipient: {recipient}")
+ output(result, ctx.obj.get('output_format', 'table'))
+ else:
+ error("Failed to create time-lock")
+ else:
+ # Local time-lock creation
+ locks_file = Path.home() / ".aitbc" / "time_locks.json"
+ locks_file.parent.mkdir(parents=True, exist_ok=True)
+
+ # Load existing locks
+ locks = []
+ if locks_file.exists():
+ with open(locks_file, 'r') as f:
+ locks = json.load(f)
+
+ # Add new lock
+ locks.append(lock_data)
+
+ # Save locks
+ with open(locks_file, 'w') as f:
+ json.dump(locks, f, indent=2)
+
+ success(f"Time-locked transfer created: {amount} tokens")
+ success(f"Unlocks at: {lock_data['unlock_time']}")
+ success(f"Recipient: {recipient}")
+ output(lock_data, ctx.obj.get('output_format', 'table'))
+
+ except Exception as e:
+ error(f"Failed to create time-lock: {e}")
+
+
+@wallet.command()
+@click.option("--wallet-name", help="Wallet to check limits for")
+@click.pass_context
+def check_limits(ctx, wallet_name: Optional[str]):
+ """Check transfer limits for wallet"""
+ limits_file = Path.home() / ".aitbc" / "transfer_limits.json"
+
+ if not limits_file.exists():
+ error("No transfer limits configured")
+ return
+
+ try:
+ with open(limits_file, 'r') as f:
+ limits = json.load(f)
+
+ wallet_key = wallet_name or "default"
+
+ if wallet_key not in limits:
+ error(f"No transfer limits configured for '{wallet_key}'")
+ return
+
+ limit_info = limits[wallet_key]
+ success(f"Transfer limits for '{wallet_key}':")
+ output(limit_info, ctx.obj.get('output_format', 'table'))
+
+ except Exception as e:
+ error(f"Failed to check transfer limits: {e}")
+
+
+@wallet.command()
+@click.option("--wallet-name", help="Wallet to check locks for")
+@click.pass_context
+def list_time_locks(ctx, wallet_name: Optional[str]):
+ """List time-locked transfers"""
+ locks_file = Path.home() / ".aitbc" / "time_locks.json"
+
+ if not locks_file.exists():
+ error("No time-locked transfers found")
+ return
+
+ try:
+ with open(locks_file, 'r') as f:
+ locks = json.load(f)
+
+ # Filter by wallet if specified
+ if wallet_name:
+ locks = [lock for lock in locks if lock.get('wallet_name') == wallet_name]
+
+ if not locks:
+ error(f"No time-locked transfers found for '{wallet_name}'")
+ return
+
+ success(f"Time-locked transfers ({len(locks)} found):")
+ output({"time_locks": locks}, ctx.obj.get('output_format', 'table'))
+
+ except Exception as e:
+ error(f"Failed to list time-locks: {e}")
+
+
+@wallet.command()
+@click.option("--wallet-name", help="Wallet name for audit")
+@click.option("--days", type=int, default=30, help="Number of days to audit")
+@click.pass_context
+def audit_trail(ctx, wallet_name: Optional[str], days: int):
+ """Generate wallet audit trail"""
+ config = ctx.obj.get('config')
+
+ audit_data = {
+ "wallet_name": wallet_name or "all",
+ "audit_period_days": days,
+ "generated_at": datetime.utcnow().isoformat()
+ }
+
+ try:
+ if ctx.obj.get("use_daemon"):
+ # Use wallet daemon for audit
+ from ..dual_mode_wallet_adapter import DualModeWalletAdapter
+ adapter = DualModeWalletAdapter(config)
+
+ result = adapter.get_audit_trail(
+ wallet_name=wallet_name,
+ days=days
+ )
+
+ if result:
+ success(f"Audit trail for '{wallet_name or 'all wallets'}':")
+ output(result, ctx.obj.get('output_format', 'table'))
+ else:
+ error("Failed to generate audit trail")
+ else:
+ # Local audit trail generation
+ audit_file = Path.home() / ".aitbc" / "audit_trail.json"
+ audit_file.parent.mkdir(parents=True, exist_ok=True)
+
+ # Generate sample audit data
+ cutoff_date = datetime.utcnow() - timedelta(days=days)
+
+ audit_data["transactions"] = []
+ audit_data["signatures"] = []
+ audit_data["limits"] = []
+ audit_data["time_locks"] = []
+
+ success(f"Audit trail generated for '{wallet_name or 'all wallets'}':")
+ output(audit_data, ctx.obj.get('output_format', 'table'))
+
+ except Exception as e:
+ error(f"Failed to generate audit trail: {e}")
diff --git a/cli/genesis_ait_devnet_proper.yaml b/cli/config/genesis_ait_devnet_proper.yaml
similarity index 100%
rename from cli/genesis_ait_devnet_proper.yaml
rename to cli/config/genesis_ait_devnet_proper.yaml
diff --git a/cli/genesis_multi_chain_dev.yaml b/cli/config/genesis_multi_chain_dev.yaml
similarity index 100%
rename from cli/genesis_multi_chain_dev.yaml
rename to cli/config/genesis_multi_chain_dev.yaml
diff --git a/cli/CLI_TEST_RESULTS.md b/cli/docs/CLI_TEST_RESULTS.md
similarity index 100%
rename from cli/CLI_TEST_RESULTS.md
rename to cli/docs/CLI_TEST_RESULTS.md
diff --git a/cli/CLI_WALLET_DAEMON_INTEGRATION_SUMMARY.md b/cli/docs/CLI_WALLET_DAEMON_INTEGRATION_SUMMARY.md
similarity index 100%
rename from cli/CLI_WALLET_DAEMON_INTEGRATION_SUMMARY.md
rename to cli/docs/CLI_WALLET_DAEMON_INTEGRATION_SUMMARY.md
diff --git a/cli/DEMONSTRATION_WALLET_CHAIN_CONNECTION.md b/cli/docs/DEMONSTRATION_WALLET_CHAIN_CONNECTION.md
similarity index 100%
rename from cli/DEMONSTRATION_WALLET_CHAIN_CONNECTION.md
rename to cli/docs/DEMONSTRATION_WALLET_CHAIN_CONNECTION.md
diff --git a/cli/IMPLEMENTATION_COMPLETE_SUMMARY.md b/cli/docs/IMPLEMENTATION_COMPLETE_SUMMARY.md
similarity index 100%
rename from cli/IMPLEMENTATION_COMPLETE_SUMMARY.md
rename to cli/docs/IMPLEMENTATION_COMPLETE_SUMMARY.md
diff --git a/cli/LOCALHOST_ONLY_ENFORCEMENT_SUMMARY.md b/cli/docs/LOCALHOST_ONLY_ENFORCEMENT_SUMMARY.md
similarity index 100%
rename from cli/LOCALHOST_ONLY_ENFORCEMENT_SUMMARY.md
rename to cli/docs/LOCALHOST_ONLY_ENFORCEMENT_SUMMARY.md
diff --git a/cli/WALLET_CHAIN_CONNECTION_SUMMARY.md b/cli/docs/WALLET_CHAIN_CONNECTION_SUMMARY.md
similarity index 100%
rename from cli/WALLET_CHAIN_CONNECTION_SUMMARY.md
rename to cli/docs/WALLET_CHAIN_CONNECTION_SUMMARY.md
diff --git a/cli/setup/requirements.txt b/cli/setup/requirements.txt
new file mode 100644
index 00000000..af96fa08
--- /dev/null
+++ b/cli/setup/requirements.txt
@@ -0,0 +1,12 @@
+click>=8.0.0
+httpx>=0.24.0
+pydantic>=1.10.0
+pyyaml>=6.0
+rich>=14.3.3
+keyring>=23.0.0
+cryptography>=3.4.8
+click-completion>=0.5.2
+tabulate>=0.9.0
+colorama>=0.4.4
+python-dotenv>=0.19.0
+aiohttp>=3.9.0
diff --git a/cli/setup/setup.py b/cli/setup/setup.py
new file mode 100755
index 00000000..16cd7a31
--- /dev/null
+++ b/cli/setup/setup.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python3
+"""
+AITBC CLI Setup Script
+"""
+
+from setuptools import setup, find_packages
+import os
+
+# Read README file
+def read_readme():
+ with open("README.md", "r", encoding="utf-8") as fh:
+ return fh.read()
+
+# Read requirements
+def read_requirements():
+ with open("requirements.txt", "r", encoding="utf-8") as fh:
+ return [line.strip() for line in fh if line.strip() and not line.startswith("#")]
+
+setup(
+ name="aitbc-cli",
+ version="0.1.0",
+ author="AITBC Team",
+ author_email="team@aitbc.net",
+ description="AITBC Command Line Interface Tools",
+ long_description=read_readme(),
+ long_description_content_type="text/markdown",
+ url="https://aitbc.net",
+ project_urls={
+ "Homepage": "https://aitbc.net",
+ "Repository": "https://github.com/aitbc/aitbc",
+ "Documentation": "https://docs.aitbc.net",
+ },
+ packages=find_packages(),
+ classifiers=[
+ "Development Status :: 4 - Beta",
+ "Intended Audience :: Developers",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Operating System :: OS Independent",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+ "Topic :: System :: Distributed Computing",
+ ],
+ python_requires=">=3.13",
+ install_requires=read_requirements(),
+ extras_require={
+ "dev": [
+ "pytest>=7.0.0",
+ "pytest-asyncio>=0.21.0",
+ "pytest-cov>=4.0.0",
+ "pytest-mock>=3.10.0",
+ "black>=22.0.0",
+ "isort>=5.10.0",
+ "flake8>=5.0.0",
+ ],
+ },
+ entry_points={
+ "console_scripts": [
+ "aitbc=aitbc_cli.main:main",
+ ],
+ },
+ include_package_data=True,
+ package_data={
+ "aitbc_cli": ["*.yaml", "*.yml", "*.json"],
+ },
+ zip_safe=False,
+)
diff --git a/cli/test_cli_structure.py b/cli/tests/test_cli_structure.py
similarity index 100%
rename from cli/test_cli_structure.py
rename to cli/tests/test_cli_structure.py
diff --git a/cli/test_multichain_cli.py b/cli/tests/test_multichain_cli.py
similarity index 100%
rename from cli/test_multichain_cli.py
rename to cli/tests/test_multichain_cli.py
diff --git a/scripts/dev/ws_load_test.py.bak b/scripts/dev/ws_load_test.py.bak
new file mode 100644
index 00000000..db3d7b1d
--- /dev/null
+++ b/scripts/dev/ws_load_test.py.bak
@@ -0,0 +1,54 @@
+from __future__ import annotations
+
+import asyncio
+import json
+from contextlib import asynccontextmanager
+from typing import List
+
+import websockets
+
+DEFAULT_WS_URL = "ws://127.0.0.1:8000/rpc/ws"
+BLOCK_TOPIC = "/blocks"
+TRANSACTION_TOPIC = "/transactions"
+
+
+async def producer(ws_url: str, interval: float = 0.1, total: int = 100) -> None:
+ async with websockets.connect(f"{ws_url}{BLOCK_TOPIC}") as websocket:
+ for index in range(total):
+ payload = {
+ "height": index,
+ "hash": f"0x{index:064x}",
+ "parent_hash": f"0x{index-1:064x}",
+ "timestamp": "2025-01-01T00:00:00Z",
+ "tx_count": 0,
+ }
+ await websocket.send(json.dumps(payload))
+ await asyncio.sleep(interval)
+
+
+async def consumer(name: str, ws_url: str, path: str, duration: float = 5.0) -> None:
+ async with websockets.connect(f"{ws_url}{path}") as websocket:
+ end = asyncio.get_event_loop().time() + duration
+ received = 0
+ while asyncio.get_event_loop().time() < end:
+ try:
+ message = await asyncio.wait_for(websocket.recv(), timeout=1.0)
+ except asyncio.TimeoutError:
+ continue
+ received += 1
+ if received % 10 == 0:
+ print(f"[{name}] received {received} messages")
+ print(f"[{name}] total received: {received}")
+
+
+async def main() -> None:
+ ws_url = DEFAULT_WS_URL
+ consumers = [
+ consumer("blocks-consumer", ws_url, BLOCK_TOPIC),
+ consumer("tx-consumer", ws_url, TRANSACTION_TOPIC),
+ ]
+ await asyncio.gather(producer(ws_url), *consumers)
+
+
+if __name__ == "__main__":
+ asyncio.run(main())