refactor: flatten CLI directory structure - remove 'box in a box'
BEFORE: /opt/aitbc/cli/ ├── aitbc_cli/ # Python package (box in a box) │ ├── commands/ │ ├── main.py │ └── ... ├── setup.py AFTER: /opt/aitbc/cli/ # Flat structure ├── commands/ # Direct access ├── main.py # Direct access ├── auth/ ├── config/ ├── core/ ├── models/ ├── utils/ ├── plugins.py └── setup.py CHANGES MADE: - Moved all files from aitbc_cli/ to cli/ root - Fixed all relative imports (from . to absolute imports) - Updated setup.py entry point: aitbc_cli.main → main - Added CLI directory to Python path in entry script - Simplified deployment.py to remove dependency on deleted core.deployment - Fixed import paths in all command files - Recreated virtual environment with new structure BENEFITS: - Eliminated 'box in a box' nesting - Simpler directory structure - Direct access to all modules - Cleaner imports - Easier maintenance and development - CLI works with both 'python main.py' and 'aitbc' commands
This commit is contained in:
File diff suppressed because it is too large
Load Diff
@@ -1,378 +0,0 @@
|
||||
"""Production deployment and scaling commands for AITBC CLI"""
|
||||
|
||||
import click
|
||||
import asyncio
|
||||
import json
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from ..core.deployment import (
|
||||
ProductionDeployment, ScalingPolicy, DeploymentStatus
|
||||
)
|
||||
from ..utils import output, error, success
|
||||
|
||||
@click.group()
def deploy():
    """Production deployment and scaling commands"""
    # Pure command group: all behavior lives in the subcommands below.
|
||||
|
||||
@deploy.command()
@click.argument('name')
@click.argument('environment')
@click.argument('region')
@click.argument('instance_type')
@click.argument('min_instances', type=int)
@click.argument('max_instances', type=int)
@click.argument('desired_instances', type=int)
@click.argument('port', type=int)
@click.argument('domain')
@click.option('--db-host', default='localhost', help='Database host')
@click.option('--db-port', default=5432, help='Database port')
@click.option('--db-name', default='aitbc', help='Database name')
@click.pass_context
def create(ctx, name, environment, region, instance_type, min_instances, max_instances, desired_instances, port, domain, db_host, db_port, db_name):
    """Create a new deployment configuration.

    Builds the database config from the --db-* options, asks the
    ProductionDeployment backend to create the deployment, and prints the
    resulting configuration record. Aborts the CLI on any failure.
    """
    try:
        deployment = ProductionDeployment()

        # Database configuration; SSL is only forced on for production.
        database_config = {
            "host": db_host,
            "port": db_port,
            "name": db_name,
            "ssl_enabled": environment == "production"  # was: True if ... else False
        }

        # The backend API is async; drive it to completion from this sync command.
        deployment_id = asyncio.run(deployment.create_deployment(
            name=name,
            environment=environment,
            region=region,
            instance_type=instance_type,
            min_instances=min_instances,
            max_instances=max_instances,
            desired_instances=desired_instances,
            port=port,
            domain=domain,
            database_config=database_config
        ))

        if deployment_id:
            success(f"Deployment configuration created! ID: {deployment_id}")

            deployment_data = {
                "Deployment ID": deployment_id,
                "Name": name,
                "Environment": environment,
                "Region": region,
                "Instance Type": instance_type,
                "Min Instances": min_instances,
                "Max Instances": max_instances,
                "Desired Instances": desired_instances,
                "Port": port,
                "Domain": domain,
                "Status": "pending",
                "Created": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            }

            output(deployment_data, ctx.obj.get('output_format', 'table'))
        else:
            error("Failed to create deployment configuration")
            raise click.Abort()

    except click.Abort:
        # BUG FIX: click.Abort subclasses RuntimeError, so the generic handler
        # below used to catch the deliberate abort above and print a second,
        # misleading "Error creating deployment" message. Re-raise it as-is.
        raise
    except Exception as e:
        error(f"Error creating deployment: {str(e)}")
        raise click.Abort()
|
||||
|
||||
@deploy.command()
@click.argument('deployment_id')
@click.pass_context
def start(ctx, deployment_id):
    """Deploy the application to production.

    Drives the async deploy_application() call and prints a small status
    record on success; aborts the CLI on failure.
    """
    try:
        deployment = ProductionDeployment()

        # Deploy application (async backend driven from this sync command).
        success_deploy = asyncio.run(deployment.deploy_application(deployment_id))

        if success_deploy:
            success(f"Deployment {deployment_id} started successfully!")

            deployment_data = {
                "Deployment ID": deployment_id,
                "Status": "running",
                "Started": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            }

            output(deployment_data, ctx.obj.get('output_format', 'table'))
        else:
            error(f"Failed to start deployment {deployment_id}")
            raise click.Abort()

    except click.Abort:
        # BUG FIX: click.Abort subclasses RuntimeError; without this clause
        # the generic handler below also printed a spurious error message.
        raise
    except Exception as e:
        error(f"Error starting deployment: {str(e)}")
        raise click.Abort()
|
||||
|
||||
@deploy.command()
@click.argument('deployment_id')
@click.argument('target_instances', type=int)
@click.option('--reason', default='manual', help='Scaling reason')
@click.pass_context
def scale(ctx, deployment_id, target_instances, reason):
    """Scale a deployment to target instance count.

    Asks the backend to scale to *target_instances* and prints a summary of
    the scaling action; aborts the CLI on failure.
    """
    try:
        deployment = ProductionDeployment()

        # Scale deployment via the async backend.
        success_scale = asyncio.run(deployment.scale_deployment(deployment_id, target_instances, reason))

        if success_scale:
            success(f"Deployment {deployment_id} scaled to {target_instances} instances!")

            scaling_data = {
                "Deployment ID": deployment_id,
                "Target Instances": target_instances,
                "Reason": reason,
                "Status": "completed",
                "Scaled": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            }

            output(scaling_data, ctx.obj.get('output_format', 'table'))
        else:
            error(f"Failed to scale deployment {deployment_id}")
            raise click.Abort()

    except click.Abort:
        # BUG FIX: don't let the generic handler below double-report the
        # deliberate abort raised above (Abort subclasses RuntimeError).
        raise
    except Exception as e:
        error(f"Error scaling deployment: {str(e)}")
        raise click.Abort()
|
||||
|
||||
@deploy.command()
@click.argument('deployment_id')
@click.pass_context
def status(ctx, deployment_id):
    """Get comprehensive deployment status.

    Prints three sections: the deployment configuration, current performance
    metrics (when present), and recent scaling events (when present).
    Aborts on unknown deployment IDs.
    """
    try:
        deployment = ProductionDeployment()

        # Fetch the full status record from the async backend.
        status_data = asyncio.run(deployment.get_deployment_status(deployment_id))

        if not status_data:
            error(f"Deployment {deployment_id} not found")
            raise click.Abort()

        # Section 1: static deployment configuration + health summary.
        deployment_info = status_data["deployment"]
        info_data = [
            {"Metric": "Deployment ID", "Value": deployment_info["deployment_id"]},
            {"Metric": "Name", "Value": deployment_info["name"]},
            {"Metric": "Environment", "Value": deployment_info["environment"]},
            {"Metric": "Region", "Value": deployment_info["region"]},
            {"Metric": "Instance Type", "Value": deployment_info["instance_type"]},
            {"Metric": "Min Instances", "Value": deployment_info["min_instances"]},
            {"Metric": "Max Instances", "Value": deployment_info["max_instances"]},
            {"Metric": "Desired Instances", "Value": deployment_info["desired_instances"]},
            {"Metric": "Port", "Value": deployment_info["port"]},
            {"Metric": "Domain", "Value": deployment_info["domain"]},
            {"Metric": "Health Status", "Value": "Healthy" if status_data["health_status"] else "Unhealthy"},
            {"Metric": "Uptime", "Value": f"{status_data['uptime_percentage']:.2f}%"}
        ]

        output(info_data, ctx.obj.get('output_format', 'table'), title=f"Deployment Status: {deployment_id}")

        # Section 2: live performance metrics, if the backend reported any.
        if status_data["metrics"]:
            metrics = status_data["metrics"]
            metrics_data = [
                {"Metric": "CPU Usage", "Value": f"{metrics['cpu_usage']:.1f}%"},
                {"Metric": "Memory Usage", "Value": f"{metrics['memory_usage']:.1f}%"},
                {"Metric": "Disk Usage", "Value": f"{metrics['disk_usage']:.1f}%"},
                {"Metric": "Request Count", "Value": metrics['request_count']},
                {"Metric": "Error Rate", "Value": f"{metrics['error_rate']:.2f}%"},
                {"Metric": "Response Time", "Value": f"{metrics['response_time']:.1f}ms"},
                {"Metric": "Active Instances", "Value": metrics['active_instances']}
            ]

            output(metrics_data, ctx.obj.get('output_format', 'table'), title="Performance Metrics")

        # Section 3: recent scaling events, if any.
        if status_data["recent_scaling_events"]:
            events = status_data["recent_scaling_events"]
            events_data = [
                {
                    "Event ID": event["event_id"][:8],
                    "Type": event["scaling_type"],
                    "From": event["old_instances"],
                    "To": event["new_instances"],
                    "Reason": event["trigger_reason"],
                    "Success": "Yes" if event["success"] else "No",
                    "Time": event["triggered_at"]
                }
                for event in events
            ]

            output(events_data, ctx.obj.get('output_format', 'table'), title="Recent Scaling Events")

    except click.Abort:
        # BUG FIX: click.Abort subclasses RuntimeError, so the generic handler
        # below used to also print "Error getting deployment status" after the
        # deliberate "not found" abort above.
        raise
    except Exception as e:
        error(f"Error getting deployment status: {str(e)}")
        raise click.Abort()
|
||||
|
||||
@deploy.command()
@click.option('--format', type=click.Choice(['table', 'json']), default='table', help='Output format')
@click.pass_context
def overview(ctx, format):
    """Get overview of all deployments.

    Prints cluster-wide counts and, when the backend supplies them,
    aggregate performance metrics. Aborts when no data is available.
    """
    try:
        deployment = ProductionDeployment()

        # Fetch the cluster-wide summary from the async backend.
        overview_data = asyncio.run(deployment.get_cluster_overview())

        if not overview_data:
            error("No deployment data available")
            raise click.Abort()

        # Cluster-level counts and rates.
        cluster_data = [
            {"Metric": "Total Deployments", "Value": overview_data["total_deployments"]},
            {"Metric": "Running Deployments", "Value": overview_data["running_deployments"]},
            {"Metric": "Total Instances", "Value": overview_data["total_instances"]},
            {"Metric": "Health Check Coverage", "Value": f"{overview_data['health_check_coverage']:.1%}"},
            {"Metric": "Recent Scaling Events", "Value": overview_data["recent_scaling_events"]},
            {"Metric": "Scaling Success Rate", "Value": f"{overview_data['successful_scaling_rate']:.1%}"}
        ]

        # NOTE(review): the --format option only acts as a fallback when the
        # root context did not set 'output_format'; confirm that is intended.
        output(cluster_data, ctx.obj.get('output_format', format), title="Cluster Overview")

        # Aggregate performance metrics, when present.
        if "aggregate_metrics" in overview_data:
            metrics = overview_data["aggregate_metrics"]
            metrics_data = [
                {"Metric": "Average CPU Usage", "Value": f"{metrics['total_cpu_usage']:.1f}%"},
                {"Metric": "Average Memory Usage", "Value": f"{metrics['total_memory_usage']:.1f}%"},
                {"Metric": "Average Disk Usage", "Value": f"{metrics['total_disk_usage']:.1f}%"},
                {"Metric": "Average Response Time", "Value": f"{metrics['average_response_time']:.1f}ms"},
                {"Metric": "Average Error Rate", "Value": f"{metrics['average_error_rate']:.2f}%"},
                {"Metric": "Average Uptime", "Value": f"{metrics['average_uptime']:.1f}%"}
            ]

            output(metrics_data, ctx.obj.get('output_format', format), title="Aggregate Performance Metrics")

    except click.Abort:
        # BUG FIX: re-raise deliberate aborts so the generic handler below
        # does not print a second, misleading error (Abort is a RuntimeError).
        raise
    except Exception as e:
        error(f"Error getting cluster overview: {str(e)}")
        raise click.Abort()
|
||||
|
||||
@deploy.command()
@click.argument('deployment_id')
@click.option('--interval', default=60, help='Update interval in seconds')
@click.pass_context
def monitor(ctx, deployment_id, interval):
    """Monitor deployment performance in real-time"""
    try:
        deployment = ProductionDeployment()

        # Rich-based live dashboard; imported lazily so the CLI starts fast.
        from rich.console import Console
        from rich.live import Live
        from rich.table import Table
        import time

        console = Console()

        def build_snapshot_table():
            """Render one snapshot of the deployment as a rich Table (or an error string)."""
            try:
                snapshot = asyncio.run(deployment.get_deployment_status(deployment_id))
                if not snapshot:
                    return f"Deployment {deployment_id} not found"

                info = snapshot["deployment"]
                metrics = snapshot.get("metrics")

                table = Table(title=f"Deployment Monitor - {info['name']} ({deployment_id[:8]}) - {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
                table.add_column("Metric", style="cyan")
                table.add_column("Value", style="green")

                table.add_row("Environment", info["environment"])
                table.add_row("Desired Instances", str(info["desired_instances"]))
                table.add_row("Health Status", "✅ Healthy" if snapshot["health_status"] else "❌ Unhealthy")
                table.add_row("Uptime", f"{snapshot['uptime_percentage']:.2f}%")

                if metrics:
                    table.add_row("CPU Usage", f"{metrics['cpu_usage']:.1f}%")
                    table.add_row("Memory Usage", f"{metrics['memory_usage']:.1f}%")
                    table.add_row("Disk Usage", f"{metrics['disk_usage']:.1f}%")
                    table.add_row("Request Count", str(metrics['request_count']))
                    table.add_row("Error Rate", f"{metrics['error_rate']:.2f}%")
                    table.add_row("Response Time", f"{metrics['response_time']:.1f}ms")
                    table.add_row("Active Instances", str(metrics['active_instances']))

                return table
            except Exception as e:
                # Keep the live view alive: render the failure as plain text.
                return f"Error getting deployment data: {e}"

        # Refresh the table every --interval seconds until Ctrl-C.
        with Live(build_snapshot_table(), refresh_per_second=1) as live:
            try:
                while True:
                    live.update(build_snapshot_table())
                    time.sleep(interval)
            except KeyboardInterrupt:
                console.print("\n[yellow]Monitoring stopped by user[/yellow]")

    except Exception as e:
        error(f"Error during monitoring: {str(e)}")
        raise click.Abort()
|
||||
|
||||
@deploy.command()
@click.argument('deployment_id')
@click.pass_context
def auto_scale(ctx, deployment_id):
    """Trigger auto-scaling evaluation for a deployment.

    Asks the backend to run its auto-scaling policy once for the given
    deployment; aborts the CLI when the evaluation fails.
    """
    try:
        deployment = ProductionDeployment()

        # Trigger the backend's auto-scaling evaluation.
        success_auto = asyncio.run(deployment.auto_scale_deployment(deployment_id))

        if success_auto:
            success(f"Auto-scaling evaluation completed for deployment {deployment_id}")
        else:
            error(f"Auto-scaling evaluation failed for deployment {deployment_id}")
            raise click.Abort()

    except click.Abort:
        # BUG FIX: click.Abort subclasses RuntimeError; re-raise so the
        # generic handler below does not print a second error message.
        raise
    except Exception as e:
        error(f"Error in auto-scaling: {str(e)}")
        raise click.Abort()
|
||||
|
||||
@deploy.command()
@click.option('--format', type=click.Choice(['table', 'json']), default='table', help='Output format')
@click.pass_context
def list_deployments(ctx, format):
    """List all deployments"""
    try:
        deployment = ProductionDeployment()

        # Build one summary row per known deployment.
        rows = []
        for dep_id in deployment.deployments.keys():
            status_data = asyncio.run(deployment.get_deployment_status(dep_id))
            if not status_data:
                continue
            info = status_data["deployment"]
            rows.append({
                "Deployment ID": info["deployment_id"][:8],
                "Name": info["name"],
                "Environment": info["environment"],
                "Instances": f"{info['desired_instances']}/{info['max_instances']}",
                "Status": "Running" if status_data["health_status"] else "Stopped",
                "Uptime": f"{status_data['uptime_percentage']:.1f}%",
                "Created": info["created_at"]
            })

        if not rows:
            output("No deployments found", ctx.obj.get('output_format', 'table'))
            return

        output(rows, ctx.obj.get('output_format', format), title="All Deployments")

    except Exception as e:
        error(f"Error listing deployments: {str(e)}")
        raise click.Abort()
|
||||
@@ -1,637 +0,0 @@
|
||||
"""Miner commands for AITBC CLI"""
|
||||
|
||||
import click
|
||||
import httpx
|
||||
import json
|
||||
import time
|
||||
import concurrent.futures
|
||||
from typing import Optional, Dict, Any, List
|
||||
from ..utils import output, error, success
|
||||
|
||||
|
||||
@click.group(invoke_without_command=True)
@click.pass_context
def miner(ctx):
    """Register as miner and process jobs"""
    # Record the detected role on the root context so parent-level code
    # (which inspects the CLI root) can see it.
    ctx.ensure_object(dict)
    ctx.find_root().detected_role = 'miner'

    # Invoked without a subcommand: behave like `--help`.
    if ctx.invoked_subcommand is None:
        click.echo(ctx.get_help())
|
||||
|
||||
|
||||
@miner.command()
@click.option("--gpu", help="GPU model name")
@click.option("--memory", type=int, help="GPU memory in GB")
@click.option("--cuda-cores", type=int, help="Number of CUDA cores")
@click.option("--miner-id", default="cli-miner", help="Miner ID")
@click.pass_context
def register(ctx, gpu: Optional[str], memory: Optional[int],
             cuda_cores: Optional[int], miner_id: str):
    """Register as a miner with the coordinator"""
    config = ctx.obj['config']

    # Assemble the GPU capability record from whichever options were given.
    capabilities = {}
    if gpu:
        capabilities["gpu"] = {"model": gpu}
    if memory:
        capabilities.setdefault("gpu", {})["memory_gb"] = memory
    if cuda_cores:
        capabilities.setdefault("gpu", {})["cuda_cores"] = cuda_cores

    # Fall back to a generic CPU profile when nothing was specified.
    if not capabilities:
        capabilities = {
            "cpu": {"cores": 4},
            "memory": {"gb": 16}
        }

    try:
        with httpx.Client() as client:
            response = client.post(
                f"{config.coordinator_url}/v1/miners/register",
                headers={
                    "Content-Type": "application/json",
                    "X-Api-Key": config.api_key or "",
                    "X-Miner-ID": miner_id
                },
                json={"capabilities": capabilities}
            )

            if response.status_code in (200, 204):
                output({
                    "miner_id": miner_id,
                    "status": "registered",
                    "capabilities": capabilities
                }, ctx.obj['output_format'])
            else:
                error(f"Failed to register: {response.status_code} - {response.text}")
    except Exception as e:
        error(f"Network error: {e}")
|
||||
|
||||
|
||||
@miner.command()
@click.option("--wait", type=int, default=5, help="Max wait time in seconds")
@click.option("--miner-id", default="cli-miner", help="Miner ID")
@click.pass_context
def poll(ctx, wait: int, miner_id: str):
    """Poll for a single job.

    Long-polls the coordinator for up to --wait seconds and prints either
    the job payload or a "no jobs" message in the configured output format.
    """
    config = ctx.obj['config']

    try:
        with httpx.Client() as client:
            response = client.post(
                f"{config.coordinator_url}/v1/miners/poll",
                # BUG FIX: the server-side long-poll window was hard-coded to
                # 5, silently ignoring the --wait option (which was only used
                # for the client timeout). Pass the option through instead.
                json={"max_wait_seconds": wait},
                headers={
                    "X-Api-Key": config.api_key or "",
                    "X-Miner-ID": miner_id
                },
                # Give the HTTP client a little headroom over the poll window.
                timeout=wait + 5
            )

            if response.status_code in (200, 204):
                if response.status_code == 204:
                    output({"message": "No jobs available"}, ctx.obj['output_format'])
                else:
                    job = response.json()
                    if job:
                        output(job, ctx.obj['output_format'])
                    else:
                        output({"message": "No jobs available"}, ctx.obj['output_format'])
            else:
                error(f"Failed to poll: {response.status_code}")
    except httpx.TimeoutException:
        output({"message": f"No jobs available within {wait} seconds"}, ctx.obj['output_format'])
    except Exception as e:
        error(f"Network error: {e}")
|
||||
|
||||
|
||||
@miner.command()
@click.option("--jobs", type=int, default=1, help="Number of jobs to process")
@click.option("--miner-id", default="cli-miner", help="Miner ID")
@click.pass_context
def mine(ctx, jobs: int, miner_id: str):
    """Mine continuously for specified number of jobs"""
    config = ctx.obj['config']
    fmt = ctx.obj['output_format']

    done = 0
    while done < jobs:
        try:
            with httpx.Client() as client:
                # Long-poll the coordinator for the next job.
                response = client.post(
                    f"{config.coordinator_url}/v1/miners/poll",
                    json={"max_wait_seconds": 5},
                    headers={
                        "X-Api-Key": config.api_key or "",
                        "X-Miner-ID": miner_id
                    },
                    timeout=30
                )

                # Unexpected status: report and stop mining.
                if response.status_code not in (200, 204):
                    error(f"Failed to poll: {response.status_code}")
                    break

                # 204 = queue empty; back off briefly and retry.
                if response.status_code == 204:
                    time.sleep(5)
                    continue

                job = response.json()
                if not job:
                    # Empty body despite 200: treat like "no job available".
                    time.sleep(5)
                    continue

                job_id = job.get('job_id')
                output({
                    "job_id": job_id,
                    "status": "processing",
                    "job_number": done + 1
                }, fmt)

                # Simulate processing (a real miner would do actual work here).
                time.sleep(2)

                # Report the result back to the coordinator.
                result_response = client.post(
                    f"{config.coordinator_url}/v1/miners/{job_id}/result",
                    headers={
                        "Content-Type": "application/json",
                        "X-Api-Key": config.api_key or "",
                        "X-Miner-ID": miner_id
                    },
                    json={
                        "result": {"output": f"Processed job {job_id}"},
                        "metrics": {}
                    }
                )

                if result_response.status_code == 200:
                    success(f"Job {job_id} completed successfully")
                    done += 1
                else:
                    error(f"Failed to submit result: {result_response.status_code}")

        except Exception as e:
            error(f"Error: {e}")
            break

    output({
        "total_processed": done,
        "miner_id": miner_id
    }, fmt)
|
||||
|
||||
|
||||
@miner.command()
@click.option("--miner-id", default="cli-miner", help="Miner ID")
@click.pass_context
def heartbeat(ctx, miner_id: str):
    """Send heartbeat to coordinator"""
    config = ctx.obj['config']

    # Static liveness payload: no in-flight work, always reported ONLINE.
    payload = {
        "inflight": 0,
        "status": "ONLINE",
        "metadata": {}
    }

    try:
        with httpx.Client() as client:
            response = client.post(
                f"{config.coordinator_url}/v1/miners/heartbeat",
                headers={
                    "X-Api-Key": config.api_key or "",
                    "X-Miner-ID": miner_id
                },
                json=payload
            )

            if response.status_code in (200, 204):
                output({
                    "miner_id": miner_id,
                    "status": "heartbeat_sent",
                    "timestamp": time.time()
                }, ctx.obj['output_format'])
            else:
                error(f"Failed to send heartbeat: {response.status_code}")
    except Exception as e:
        error(f"Network error: {e}")
|
||||
|
||||
|
||||
@miner.command()
@click.option("--miner-id", default="cli-miner", help="Miner ID")
@click.pass_context
def status(ctx, miner_id: str):
    """Check miner status"""
    config = ctx.obj['config']

    # This would typically query a miner status endpoint
    # For now, we'll just show the miner info
    info = {
        "miner_id": miner_id,
        "coordinator": config.coordinator_url,
        "status": "active"
    }
    output(info, ctx.obj['output_format'])
|
||||
|
||||
|
||||
@miner.command()
@click.option("--miner-id", default="cli-miner", help="Miner ID")
@click.option("--from-time", help="Filter from timestamp (ISO format)")
@click.option("--to-time", help="Filter to timestamp (ISO format)")
@click.pass_context
def earnings(ctx, miner_id: str, from_time: Optional[str], to_time: Optional[str]):
    """Show miner earnings.

    Queries the coordinator's earnings endpoint, optionally restricted to a
    [from_time, to_time] window, and prints the response. Exits with status
    1 on any failure.
    """
    config = ctx.obj['config']

    # Query parameters; time bounds are optional.
    params = {"miner_id": miner_id}
    if from_time:
        params["from_time"] = from_time
    if to_time:
        params["to_time"] = to_time

    try:
        # NOTE(review): this issues a POST for a read-only query; confirm the
        # coordinator API really expects POST here rather than GET.
        with httpx.Client() as client:
            response = client.post(
                f"{config.coordinator_url}/v1/miners/{miner_id}/earnings",
                params=params,
                headers={"X-Api-Key": config.api_key or ""}
            )

            if response.status_code in (200, 204):
                data = response.json()
                output(data, ctx.obj['output_format'])
                return
            error(f"Failed to get earnings: {response.status_code}")
    except Exception as e:
        error(f"Network error: {e}")
    # BUG FIX: ctx.exit() raises an exception; calling it inside the try block
    # let the generic handler above catch it and print a bogus "Network error"
    # on every API failure. Exit once, after all handlers.
    ctx.exit(1)
|
||||
|
||||
|
||||
@miner.command(name="update-capabilities")
@click.option("--gpu", help="GPU model name")
@click.option("--memory", type=int, help="GPU memory in GB")
@click.option("--cuda-cores", type=int, help="Number of CUDA cores")
@click.option("--miner-id", default="cli-miner", help="Miner ID")
@click.pass_context
def update_capabilities(ctx, gpu: Optional[str], memory: Optional[int],
                        cuda_cores: Optional[int], miner_id: str):
    """Update miner GPU capabilities.

    Sends a PUT with the assembled capability record; requires at least one
    of --gpu/--memory/--cuda-cores. Exits with status 1 on API failure.
    """
    config = ctx.obj['config']

    # Assemble the GPU capability record from whichever options were given.
    capabilities = {}
    if gpu:
        capabilities["gpu"] = {"model": gpu}
    if memory:
        capabilities.setdefault("gpu", {})["memory_gb"] = memory
    if cuda_cores:
        capabilities.setdefault("gpu", {})["cuda_cores"] = cuda_cores

    if not capabilities:
        error("No capabilities specified. Use --gpu, --memory, or --cuda-cores.")
        return

    try:
        with httpx.Client() as client:
            response = client.put(
                f"{config.coordinator_url}/v1/miners/{miner_id}/capabilities",
                headers={
                    "Content-Type": "application/json",
                    "X-Api-Key": config.api_key or ""
                },
                json={"capabilities": capabilities}
            )

            if response.status_code in (200, 204):
                output({
                    "miner_id": miner_id,
                    "status": "capabilities_updated",
                    "capabilities": capabilities
                }, ctx.obj['output_format'])
                return
            error(f"Failed to update capabilities: {response.status_code}")
    except Exception as e:
        error(f"Network error: {e}")
    # BUG FIX: ctx.exit() raises; calling it inside the try block let the
    # generic handler above catch it and print a bogus "Network error".
    ctx.exit(1)
|
||||
|
||||
|
||||
@miner.command()
@click.option("--miner-id", default="cli-miner", help="Miner ID")
@click.option("--force", is_flag=True, help="Force deregistration without confirmation")
@click.pass_context
def deregister(ctx, miner_id: str, force: bool):
    """Deregister miner from the coordinator.

    Prompts for confirmation unless --force is given, then issues a DELETE
    for this miner. Exits with status 1 on API failure.
    """
    # Interactive guard: --force skips the prompt entirely.
    if not force:
        if not click.confirm(f"Deregister miner '{miner_id}'?"):
            click.echo("Cancelled.")
            return

    config = ctx.obj['config']

    try:
        with httpx.Client() as client:
            response = client.delete(
                f"{config.coordinator_url}/v1/miners/{miner_id}",
                headers={"X-Api-Key": config.api_key or ""}
            )

            if response.status_code in (200, 204):
                output({
                    "miner_id": miner_id,
                    "status": "deregistered"
                }, ctx.obj['output_format'])
                return
            error(f"Failed to deregister: {response.status_code}")
    except Exception as e:
        error(f"Network error: {e}")
    # BUG FIX: ctx.exit() raises; calling it inside the try block let the
    # generic handler above catch it and print a bogus "Network error".
    ctx.exit(1)
|
||||
|
||||
|
||||
@miner.command()
@click.option("--limit", default=10, help="Number of jobs to show")
@click.option("--type", "job_type", help="Filter by job type")
@click.option("--min-reward", type=float, help="Minimum reward threshold")
@click.option("--status", "job_status", help="Filter by status (pending, running, completed, failed)")
@click.option("--miner-id", default="cli-miner", help="Miner ID")
@click.pass_context
def jobs(ctx, limit: int, job_type: Optional[str], min_reward: Optional[float],
         job_status: Optional[str], miner_id: str):
    """List miner jobs with filtering.

    Queries the coordinator for this miner's jobs, applying the optional
    type/reward/status filters. Exits with status 1 on any failure.
    """
    config = ctx.obj['config']

    # Query parameters; every filter option is optional.
    params = {"limit": limit, "miner_id": miner_id}
    if job_type:
        params["type"] = job_type
    if min_reward is not None:
        params["min_reward"] = min_reward
    if job_status:
        params["status"] = job_status

    try:
        # NOTE(review): read-only listing issued as POST; confirm the API verb.
        with httpx.Client() as client:
            response = client.post(
                f"{config.coordinator_url}/v1/miners/{miner_id}/jobs",
                params=params,
                headers={"X-Api-Key": config.api_key or ""}
            )

            if response.status_code in (200, 204):
                data = response.json()
                output(data, ctx.obj['output_format'])
                return
            error(f"Failed to get jobs: {response.status_code}")
    except Exception as e:
        error(f"Network error: {e}")
    # BUG FIX: ctx.exit() raises; calling it inside the try block let the
    # generic handler above catch it and print a bogus "Network error".
    ctx.exit(1)
|
||||
|
||||
|
||||
def _process_single_job(config, miner_id: str, worker_id: int) -> Dict[str, Any]:
    """Process a single job (used by concurrent mine)"""
    try:
        with httpx.Client() as http_client:
            # Long-poll the coordinator for one job.
            poll_resp = http_client.post(
                f"{config.coordinator_url}/v1/miners/poll",
                json={"max_wait_seconds": 5},
                headers={
                    "X-Api-Key": config.api_key or "",
                    "X-Miner-ID": miner_id
                },
                timeout=30
            )

            if poll_resp.status_code == 204:
                return {"worker": worker_id, "status": "no_job"}

            if poll_resp.status_code == 200:
                job = poll_resp.json()
                if job:
                    job_id = job.get('job_id')
                    time.sleep(2)  # Simulate processing

                    # Report the (simulated) result back to the coordinator.
                    submit_resp = http_client.post(
                        f"{config.coordinator_url}/v1/miners/{job_id}/result",
                        headers={
                            "Content-Type": "application/json",
                            "X-Api-Key": config.api_key or "",
                            "X-Miner-ID": miner_id
                        },
                        json={"result": {"output": f"Processed by worker {worker_id}"}, "metrics": {}}
                    )

                    completed = submit_resp.status_code == 200
                    return {
                        "worker": worker_id,
                        "job_id": job_id,
                        "status": "completed" if completed else "failed"
                    }

            # Any other status, or an empty 200 body: nothing to do.
            return {"worker": worker_id, "status": "no_job"}
    except Exception as e:
        return {"worker": worker_id, "status": "error", "error": str(e)}
|
||||
|
||||
|
||||
def _run_ollama_inference(ollama_url: str, model: str, prompt: str) -> Dict[str, Any]:
    """Run inference through local Ollama instance"""
    request_body = {
        "model": model,
        "prompt": prompt,
        "stream": False  # single blocking response, no token streaming
    }
    try:
        with httpx.Client(timeout=120) as client:
            response = client.post(f"{ollama_url}/api/generate", json=request_body)

            if response.status_code != 200:
                return {"error": f"Ollama returned {response.status_code}"}

            data = response.json()
            return {
                "response": data.get("response", ""),
                "model": data.get("model", model),
                "total_duration": data.get("total_duration", 0),
                "eval_count": data.get("eval_count", 0),
                "eval_duration": data.get("eval_duration", 0),
            }
    except Exception as e:
        return {"error": str(e)}
|
||||
|
||||
|
||||
@miner.command(name="mine-ollama")
@click.option("--jobs", type=int, default=1, help="Number of jobs to process")
@click.option("--miner-id", default="cli-miner", help="Miner ID")
@click.option("--ollama-url", default="http://localhost:11434", help="Ollama API URL")
@click.option("--model", default="gemma3:1b", help="Ollama model to use")
@click.pass_context
def mine_ollama(ctx, jobs: int, miner_id: str, ollama_url: str, model: str):
    """Mine jobs using local Ollama for GPU inference"""
    # Polls the coordinator for jobs and runs each prompt through the local
    # Ollama HTTP API, submitting a result (or failure) per job.
    config = ctx.obj['config']

    # Verify Ollama is reachable and the requested model is pulled before
    # entering the mining loop; abort early with a clear message otherwise.
    try:
        with httpx.Client(timeout=5) as client:
            resp = client.get(f"{ollama_url}/api/tags")
            if resp.status_code != 200:
                error(f"Cannot reach Ollama at {ollama_url}")
                return
            models = [m["name"] for m in resp.json().get("models", [])]
            if model not in models:
                error(f"Model '{model}' not found. Available: {', '.join(models)}")
                return
            success(f"Ollama connected: {ollama_url} | model: {model}")
    except Exception as e:
        error(f"Cannot connect to Ollama: {e}")
        return

    processed = 0
    # Keep polling until the requested number of jobs completed successfully.
    # NOTE(review): failed inferences do not increment `processed`, so the
    # loop only exits on the success count, a poll error, or an exception.
    while processed < jobs:
        try:
            with httpx.Client() as client:
                # Long-poll the coordinator (up to 10s server-side) for a job.
                response = client.post(
                    f"{config.coordinator_url}/v1/miners/poll",
                    json={"max_wait_seconds": 10},
                    headers={
                        "X-Api-Key": config.api_key or "",
                        "X-Miner-ID": miner_id
                    },
                    timeout=30
                )

                # 204 = no job available yet; back off and retry.
                if response.status_code == 204:
                    time.sleep(5)
                    continue

                if response.status_code != 200:
                    error(f"Failed to poll: {response.status_code}")
                    break

                job = response.json()
                if not job:
                    time.sleep(5)
                    continue

                job_id = job.get('job_id')
                payload = job.get('payload', {})
                prompt = payload.get('prompt', '')
                # A job may request a specific model; fall back to the CLI default.
                job_model = payload.get('model', model)

                output({
                    "job_id": job_id,
                    "status": "processing",
                    # Truncated for display only; the full prompt is sent to Ollama.
                    "prompt": prompt[:80] + ("..." if len(prompt) > 80 else ""),
                    "model": job_model,
                    "job_number": processed + 1
                }, ctx.obj['output_format'])

                # Run inference through Ollama and measure wall-clock time (ms).
                start_time = time.time()
                ollama_result = _run_ollama_inference(ollama_url, job_model, prompt)
                duration_ms = int((time.time() - start_time) * 1000)

                if "error" in ollama_result:
                    error(f"Ollama inference failed: {ollama_result['error']}")
                    # Submit failure so the coordinator can handle the job
                    # (response intentionally not checked; best-effort).
                    client.post(
                        f"{config.coordinator_url}/v1/miners/{job_id}/fail",
                        headers={
                            "Content-Type": "application/json",
                            "X-Api-Key": config.api_key or "",
                            "X-Miner-ID": miner_id
                        },
                        json={"error_code": "INFERENCE_FAILED", "error_message": ollama_result['error'], "metrics": {}}
                    )
                    continue

                # Submit successful result together with timing metrics.
                result_response = client.post(
                    f"{config.coordinator_url}/v1/miners/{job_id}/result",
                    headers={
                        "Content-Type": "application/json",
                        "X-Api-Key": config.api_key or "",
                        "X-Miner-ID": miner_id
                    },
                    json={
                        "result": {
                            "response": ollama_result.get("response", ""),
                            "model": ollama_result.get("model", job_model),
                            "provider": "ollama",
                            "eval_count": ollama_result.get("eval_count", 0),
                        },
                        "metrics": {
                            "duration_ms": duration_ms,
                            "eval_count": ollama_result.get("eval_count", 0),
                            "eval_duration": ollama_result.get("eval_duration", 0),
                            "total_duration": ollama_result.get("total_duration", 0),
                        }
                    }
                )

                if result_response.status_code == 200:
                    success(f"Job {job_id} completed via Ollama ({duration_ms}ms)")
                    processed += 1
                else:
                    error(f"Failed to submit result: {result_response.status_code}")

        except Exception as e:
            # Any unexpected error (network, JSON decoding, ...) aborts mining.
            error(f"Error: {e}")
            break

    # Session summary once the loop exits (success target reached or aborted).
    output({
        "total_processed": processed,
        "miner_id": miner_id,
        "model": model,
        "provider": "ollama"
    }, ctx.obj['output_format'])
|
||||
|
||||
|
||||
@miner.command(name="concurrent-mine")
@click.option("--workers", type=int, default=2, help="Number of concurrent workers")
@click.option("--jobs", "total_jobs", type=int, default=5, help="Total jobs to process")
@click.option("--miner-id", default="cli-miner", help="Miner ID")
@click.pass_context
def concurrent_mine(ctx, workers: int, total_jobs: int, miner_id: str):
    """Mine with concurrent job processing"""
    config = ctx.obj['config']

    success(f"Starting concurrent mining: {workers} workers, {total_jobs} jobs")

    completed = 0
    failed = 0

    # Process jobs in batches of up to `workers`, one thread per job slot.
    # `remaining` decreases only on a definitive outcome (completed/failed);
    # "no_job" results leave it untouched so the next batch re-polls.
    with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
        remaining = total_jobs
        while remaining > 0:
            batch_size = min(remaining, workers)
            # Worker index `i` restarts at 0 each batch; it is only used as
            # a per-batch label by _process_single_job.
            futures = [
                executor.submit(_process_single_job, config, miner_id, i)
                for i in range(batch_size)
            ]

            for future in concurrent.futures.as_completed(futures):
                result = future.result()
                if result.get("status") == "completed":
                    completed += 1
                    remaining -= 1
                    output(result, ctx.obj['output_format'])
                elif result.get("status") == "no_job":
                    # Coordinator had nothing for this worker; brief back-off
                    # before the next batch polls again.
                    time.sleep(2)
                else:
                    # "failed" or "error" outcomes still consume a job slot.
                    failed += 1
                    remaining -= 1

    # Session summary once every requested job has a definitive outcome.
    output({
        "status": "finished",
        "completed": completed,
        "failed": failed,
        "workers": workers
    }, ctx.obj['output_format'])
|
||||
File diff suppressed because it is too large
Load Diff
@@ -3,7 +3,7 @@
|
||||
import keyring
|
||||
import os
|
||||
from typing import Optional, Dict
|
||||
from ..utils import success, error, warning
|
||||
from utils import success, error, warning
|
||||
|
||||
|
||||
class AuthManager:
|
||||
@@ -4,7 +4,7 @@ import click
|
||||
import httpx
|
||||
import json
|
||||
from typing import Optional, List, Dict, Any
|
||||
from ..utils import output, error, success
|
||||
from utils import output, error, success
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -496,7 +496,7 @@ def backup(ctx):
|
||||
@click.pass_context
|
||||
def audit_log(ctx, limit: int, action_filter: Optional[str]):
|
||||
"""View audit log"""
|
||||
from ..utils import AuditLogger
|
||||
from utils import AuditLogger
|
||||
|
||||
logger = AuditLogger()
|
||||
entries = logger.get_logs(limit=limit, action_filter=action_filter)
|
||||
@@ -9,7 +9,7 @@ import asyncio
|
||||
import json
|
||||
from typing import Optional, List, Dict, Any
|
||||
from datetime import datetime, timedelta
|
||||
from aitbc_cli.imports import ensure_coordinator_api_imports
|
||||
from imports import ensure_coordinator_api_imports
|
||||
|
||||
ensure_coordinator_api_imports()
|
||||
|
||||
@@ -7,7 +7,7 @@ import time
|
||||
import uuid
|
||||
from typing import Optional, Dict, Any, List
|
||||
from pathlib import Path
|
||||
from ..utils import output, error, success, warning
|
||||
from utils import output, error, success, warning
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -5,12 +5,12 @@ import asyncio
|
||||
import json
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional
|
||||
from ..core.config import load_multichain_config
|
||||
from ..core.agent_communication import (
|
||||
from core.config import load_multichain_config
|
||||
from core.agent_communication import (
|
||||
CrossChainAgentCommunication, AgentInfo, AgentMessage,
|
||||
MessageType, AgentStatus
|
||||
)
|
||||
from ..utils import output, error, success
|
||||
from utils import output, error, success
|
||||
|
||||
@click.group()
|
||||
def agent_comm():
|
||||
@@ -9,7 +9,7 @@ import asyncio
|
||||
import json
|
||||
from typing import Optional, List, Dict, Any
|
||||
from datetime import datetime
|
||||
from aitbc_cli.imports import ensure_coordinator_api_imports
|
||||
from imports import ensure_coordinator_api_imports
|
||||
|
||||
ensure_coordinator_api_imports()
|
||||
|
||||
@@ -9,7 +9,7 @@ import asyncio
|
||||
import json
|
||||
from typing import Optional, List, Dict, Any
|
||||
from datetime import datetime, timedelta
|
||||
from aitbc_cli.imports import ensure_coordinator_api_imports
|
||||
from imports import ensure_coordinator_api_imports
|
||||
|
||||
ensure_coordinator_api_imports()
|
||||
|
||||
@@ -4,9 +4,9 @@ import click
|
||||
import asyncio
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional
|
||||
from ..core.config import load_multichain_config
|
||||
from ..core.analytics import ChainAnalytics
|
||||
from ..utils import output, error, success
|
||||
from core.config import load_multichain_config
|
||||
from core.analytics import ChainAnalytics
|
||||
from utils import output, error, success
|
||||
|
||||
@click.group()
|
||||
def analytics():
|
||||
@@ -3,8 +3,8 @@
|
||||
import click
|
||||
import os
|
||||
from typing import Optional
|
||||
from ..auth import AuthManager
|
||||
from ..utils import output, success, error, warning
|
||||
from auth import AuthManager
|
||||
from utils import output, success, error, warning
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -12,7 +12,7 @@ def _get_node_endpoint(ctx):
|
||||
return "http://127.0.0.1:8006" # Use new blockchain RPC port
|
||||
|
||||
from typing import Optional, List
|
||||
from ..utils import output, error
|
||||
from utils import output, error
|
||||
import os
|
||||
|
||||
|
||||
@@ -1016,7 +1016,7 @@ def verify_genesis(ctx, chain: str, genesis_hash: Optional[str], verify_signatur
|
||||
"""Verify genesis block integrity for a specific chain"""
|
||||
try:
|
||||
import httpx
|
||||
from ..utils import success
|
||||
from utils import success
|
||||
|
||||
with httpx.Client() as client:
|
||||
# Get genesis block for the specified chain
|
||||
@@ -1129,7 +1129,7 @@ def genesis_hash(ctx, chain: str):
|
||||
"""Get the genesis block hash for a specific chain"""
|
||||
try:
|
||||
import httpx
|
||||
from ..utils import success
|
||||
from utils import success
|
||||
|
||||
with httpx.Client() as client:
|
||||
response = client.get(
|
||||
@@ -2,10 +2,10 @@
|
||||
|
||||
import click
|
||||
from typing import Optional
|
||||
from ..core.chain_manager import ChainManager, ChainNotFoundError, NodeNotAvailableError
|
||||
from ..core.config import MultiChainConfig, load_multichain_config
|
||||
from ..models.chain import ChainType
|
||||
from ..utils import output, error, success
|
||||
from core.chain_manager import ChainManager, ChainNotFoundError, NodeNotAvailableError
|
||||
from core.config import MultiChainConfig, load_multichain_config
|
||||
from models.chain import ChainType
|
||||
from utils import output, error, success
|
||||
|
||||
@click.group()
|
||||
def chain():
|
||||
@@ -200,7 +200,7 @@ def create(ctx, config_file, node, dry_run):
|
||||
"""Create a new chain from configuration file"""
|
||||
try:
|
||||
import yaml
|
||||
from ..models.chain import ChainConfig
|
||||
from models.chain import ChainConfig
|
||||
|
||||
config = load_multichain_config()
|
||||
chain_manager = ChainManager(config)
|
||||
@@ -5,7 +5,7 @@ import httpx
|
||||
import json
|
||||
import time
|
||||
from typing import Optional
|
||||
from ..utils import output, error, success
|
||||
from utils import output, error, success
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -348,7 +348,7 @@ def batch_submit(ctx, file_path: str, file_format: Optional[str], retries: int,
|
||||
"""Submit multiple jobs from a CSV or JSON file"""
|
||||
import csv
|
||||
from pathlib import Path
|
||||
from ..utils import progress_bar
|
||||
from utils import progress_bar
|
||||
|
||||
config = ctx.obj['config']
|
||||
path = Path(file_path)
|
||||
@@ -11,7 +11,7 @@ from typing import Optional, Dict, Any
|
||||
from datetime import datetime
|
||||
|
||||
# Import compliance providers
|
||||
from aitbc_cli.kyc_aml_providers import submit_kyc_verification, check_kyc_status, perform_aml_screening
|
||||
from kyc_aml_providers import submit_kyc_verification, check_kyc_status, perform_aml_screening
|
||||
|
||||
@click.group()
|
||||
def compliance():
|
||||
@@ -8,8 +8,8 @@ import yaml
|
||||
import json
|
||||
from pathlib import Path
|
||||
from typing import Optional, Dict, Any
|
||||
from ..config import get_config, Config
|
||||
from ..utils import output, error, success
|
||||
from config import get_config, Config
|
||||
from utils import output, error, success
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -459,7 +459,7 @@ def delete(ctx, name: str):
|
||||
@click.pass_context
|
||||
def set_secret(ctx, key: str, value: str):
|
||||
"""Set an encrypted configuration value"""
|
||||
from ..utils import encrypt_value
|
||||
from utils import encrypt_value
|
||||
|
||||
config_dir = Path.home() / ".config" / "aitbc"
|
||||
config_dir.mkdir(parents=True, exist_ok=True)
|
||||
@@ -488,7 +488,7 @@ def set_secret(ctx, key: str, value: str):
|
||||
@click.pass_context
|
||||
def get_secret(ctx, key: str):
|
||||
"""Get a decrypted configuration value"""
|
||||
from ..utils import decrypt_value
|
||||
from utils import decrypt_value
|
||||
|
||||
secrets_file = Path.home() / ".config" / "aitbc" / "secrets.json"
|
||||
|
||||
@@ -5,8 +5,8 @@ import httpx
|
||||
import json
|
||||
from typing import Optional
|
||||
from tabulate import tabulate
|
||||
from ..config import get_config
|
||||
from ..utils import success, error, output
|
||||
from config import get_config
|
||||
from utils import success, error, output
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -9,8 +9,8 @@ import json
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Dict, Any
|
||||
from web3 import Web3
|
||||
from ..utils.blockchain import get_web3_connection, get_contract
|
||||
from ..utils.config import load_config
|
||||
from utils.blockchain import get_web3_connection, get_contract
|
||||
from utils.config import load_config
|
||||
|
||||
@click.group()
|
||||
def dao():
|
||||
91
cli/commands/deployment.py
Normal file
91
cli/commands/deployment.py
Normal file
@@ -0,0 +1,91 @@
|
||||
"""Production deployment guidance for AITBC CLI"""
|
||||
|
||||
import click
|
||||
from utils import output, error, success
|
||||
|
||||
@click.group()
def deploy():
    """Production deployment guidance and setup"""
    # Click group that namespaces the deployment helper subcommands
    # (`setup`, `status`); the group callback itself does nothing.
    pass
|
||||
|
||||
@deploy.command()
@click.option('--service', default='all', help='Service to deploy (all, coordinator, blockchain, marketplace)')
@click.option('--environment', default='production', help='Deployment environment')
def setup(service, environment):
    """Get deployment setup instructions"""
    # Prints a static, human-readable deployment checklist for the chosen
    # service. Output format is passed as None (plain text) because these
    # are free-form instruction lines, not structured data.
    output(f"🚀 {environment.title()} Deployment Setup for {service.title()}", None)

    # Per-service checklists; 'all' is the full-platform walkthrough.
    instructions = {
        'coordinator': [
            "1. Install dependencies: pip install -r requirements.txt",
            "2. Set environment variables in .env file",
            "3. Run: python -m coordinator.main",
            "4. Configure nginx reverse proxy",
            "5. Set up SSL certificates"
        ],
        'blockchain': [
            "1. Install blockchain node dependencies",
            "2. Initialize genesis block: aitbc genesis init",
            "3. Start node: python -m blockchain.node",
            "4. Configure peer connections",
            "5. Enable mining if needed"
        ],
        'marketplace': [
            "1. Install marketplace dependencies",
            "2. Set up database: postgresql-setup.sh",
            "3. Run migrations: python -m marketplace.migrate",
            "4. Start service: python -m marketplace.main",
            "5. Configure GPU mining nodes"
        ],
        'all': [
            "📋 Complete AITBC Platform Deployment:",
            "",
            "1. Prerequisites:",
            " - Python 3.13+",
            " - PostgreSQL 14+",
            " - Redis 6+",
            " - Docker (optional)",
            "",
            "2. Environment Setup:",
            " - Copy .env.example to .env",
            " - Configure database URLs",
            " - Set API keys and secrets",
            "",
            "3. Database Setup:",
            " - createdb aitbc",
            " - Run migrations: python manage.py migrate",
            "",
            "4. Service Deployment:",
            " - Coordinator: python -m coordinator.main",
            " - Blockchain: python -m blockchain.node",
            " - Marketplace: python -m marketplace.main",
            "",
            "5. Frontend Setup:",
            " - npm install",
            " - npm run build",
            " - Configure web server"
        ]
    }

    # Unknown --service values fall back to the complete 'all' checklist.
    for step in instructions.get(service, instructions['all']):
        output(step, None)

    output(f"\n💡 For detailed deployment guides, see: docs/deployment/{environment}.md", None)
|
||||
|
||||
@deploy.command()
@click.option('--service', help='Service to check')
def status(service):
    """Check deployment status"""
    # Lists the well-known local health endpoints; actually probing them is
    # left to the operator (curl/browser), as the closing hint says.
    target = service or 'All Services'
    output(f"📊 Deployment Status Check for {target}", None)

    health_endpoints = (
        "Coordinator API: http://localhost:8000/health",
        "Blockchain Node: http://localhost:8006/status",
        "Marketplace: http://localhost:8014/health",
        "Wallet Service: http://localhost:8002/status",
    )
    for endpoint in health_endpoints:
        output(f" • {endpoint}", None)

    output("\n💡 Use curl or browser to check each endpoint", None)
|
||||
@@ -9,7 +9,7 @@ import asyncio
|
||||
import json
|
||||
from typing import Optional, List, Dict, Any
|
||||
from datetime import datetime
|
||||
from aitbc_cli.imports import ensure_coordinator_api_imports
|
||||
from imports import ensure_coordinator_api_imports
|
||||
|
||||
ensure_coordinator_api_imports()
|
||||
|
||||
@@ -7,8 +7,8 @@ import os
|
||||
from pathlib import Path
|
||||
from typing import Optional, Dict, Any, List
|
||||
from datetime import datetime
|
||||
from ..utils import output, error, success, warning
|
||||
from ..config import get_config
|
||||
from utils import output, error, success, warning
|
||||
from config import get_config
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -4,7 +4,7 @@ import click
|
||||
import subprocess
|
||||
import json
|
||||
from typing import Optional, List
|
||||
from ..utils import output, error
|
||||
from utils import output, error
|
||||
|
||||
|
||||
def _get_explorer_endpoint(ctx):
|
||||
@@ -5,11 +5,11 @@ import json
|
||||
import yaml
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
from ..core.genesis_generator import GenesisGenerator, GenesisValidationError
|
||||
from ..core.config import MultiChainConfig, load_multichain_config
|
||||
from ..models.chain import GenesisConfig
|
||||
from ..utils import output, error, success
|
||||
from .keystore import create_keystore_via_script
|
||||
from core.genesis_generator import GenesisGenerator, GenesisValidationError
|
||||
from core.config import MultiChainConfig, load_multichain_config
|
||||
from models.chain import GenesisConfig
|
||||
from utils import output, error, success
|
||||
from commands.keystore import create_keystore_via_script
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
@@ -141,7 +141,7 @@ def validate(ctx, genesis_file):
|
||||
with open(genesis_path, 'r') as f:
|
||||
genesis_data = json.load(f)
|
||||
|
||||
from ..models.chain import GenesisBlock
|
||||
from models.chain import GenesisBlock
|
||||
genesis_block = GenesisBlock(**genesis_data)
|
||||
|
||||
# Validate genesis block
|
||||
@@ -6,7 +6,7 @@ import hashlib
|
||||
from pathlib import Path
|
||||
from typing import Optional, Dict, Any, List
|
||||
from datetime import datetime
|
||||
from ..utils import output, error, success, warning
|
||||
from utils import output, error, success, warning
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -59,7 +59,7 @@ def status(agent_id, test_mode):
|
||||
def get_config():
|
||||
"""Get CLI configuration"""
|
||||
try:
|
||||
from .config import get_config
|
||||
from config import get_config
|
||||
return get_config()
|
||||
except ImportError:
|
||||
# Fallback for testing
|
||||
@@ -557,7 +557,7 @@ def deployment_status(deployment_id, test_mode):
|
||||
def get_config():
|
||||
"""Get CLI configuration"""
|
||||
try:
|
||||
from .config import get_config
|
||||
from config import get_config
|
||||
return get_config()
|
||||
except ImportError:
|
||||
# Fallback for testing
|
||||
@@ -8,7 +8,7 @@ import time
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
from datetime import datetime, timedelta
|
||||
from ..utils import output, error, success
|
||||
from utils import output, error, success
|
||||
|
||||
|
||||
GOVERNANCE_DIR = Path.home() / ".aitbc" / "governance"
|
||||
@@ -7,7 +7,7 @@ import httpx
|
||||
from pathlib import Path
|
||||
from typing import Optional, Dict, Any, List
|
||||
from datetime import datetime, timedelta
|
||||
from ..utils import output, error, success, warning
|
||||
from utils import output, error, success, warning
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -5,7 +5,7 @@ import httpx
|
||||
import json
|
||||
import asyncio
|
||||
from typing import Optional, List, Dict, Any
|
||||
from ..utils import output, error, success
|
||||
from utils import output, error, success
|
||||
import os
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ import json
|
||||
import base64
|
||||
from typing import Optional, Dict, Any, List
|
||||
from pathlib import Path
|
||||
from ..utils import output, error, success, warning
|
||||
from utils import output, error, success, warning
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -6,12 +6,12 @@ import json
|
||||
from decimal import Decimal
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from ..core.config import load_multichain_config
|
||||
from ..core.marketplace import (
|
||||
from core.config import load_multichain_config
|
||||
from core.marketplace import (
|
||||
GlobalChainMarketplace, ChainType, MarketplaceStatus,
|
||||
TransactionStatus
|
||||
)
|
||||
from ..utils import output, error, success
|
||||
from utils import output, error, success
|
||||
|
||||
@click.group()
|
||||
def marketplace():
|
||||
@@ -6,7 +6,7 @@ import json
|
||||
import time
|
||||
import concurrent.futures
|
||||
from typing import Optional, Dict, Any, List
|
||||
from ..utils import output, error, success
|
||||
from utils import output, error, success
|
||||
|
||||
|
||||
@click.group(invoke_without_command=True)
|
||||
@@ -7,7 +7,7 @@ import time
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
from datetime import datetime, timedelta
|
||||
from ..utils import output, error, success, console
|
||||
from utils import output, error, success, console
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -53,7 +53,7 @@ def status(test_mode):
|
||||
def get_config():
|
||||
"""Get CLI configuration"""
|
||||
try:
|
||||
from .config import get_config
|
||||
from config import get_config
|
||||
return get_config()
|
||||
except ImportError:
|
||||
# Fallback for testing
|
||||
@@ -7,7 +7,7 @@ import base64
|
||||
import mimetypes
|
||||
from typing import Optional, Dict, Any, List
|
||||
from pathlib import Path
|
||||
from ..utils import output, error, success, warning
|
||||
from utils import output, error, success, warning
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -7,7 +7,7 @@ import uuid
|
||||
from pathlib import Path
|
||||
from typing import Optional, Dict, Any, List
|
||||
from datetime import datetime, timedelta
|
||||
from ..utils import output, error, success, warning
|
||||
from utils import output, error, success, warning
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -2,9 +2,9 @@
|
||||
|
||||
import click
|
||||
from typing import Optional
|
||||
from ..core.config import MultiChainConfig, load_multichain_config, get_default_node_config, add_node_config, remove_node_config
|
||||
from ..core.node_client import NodeClient
|
||||
from ..utils import output, error, success
|
||||
from core.config import MultiChainConfig, load_multichain_config, get_default_node_config, add_node_config, remove_node_config
|
||||
from core.node_client import NodeClient
|
||||
from utils import output, error, success
|
||||
|
||||
@click.group()
|
||||
def node():
|
||||
@@ -192,7 +192,7 @@ def add(ctx, node_id, endpoint, timeout, max_connections, retry_count):
|
||||
|
||||
config = add_node_config(config, node_config)
|
||||
|
||||
from ..core.config import save_multichain_config
|
||||
from core.config import save_multichain_config
|
||||
save_multichain_config(config)
|
||||
|
||||
success(f"Node {node_id} added successfully!")
|
||||
@@ -241,7 +241,7 @@ def remove(ctx, node_id, force):
|
||||
|
||||
config = remove_node_config(config, node_id)
|
||||
|
||||
from ..core.config import save_multichain_config
|
||||
from core.config import save_multichain_config
|
||||
save_multichain_config(config)
|
||||
|
||||
success(f"Node {node_id} removed successfully!")
|
||||
@@ -5,7 +5,7 @@ import httpx
|
||||
import json
|
||||
import time
|
||||
from typing import Optional, Dict, Any, List
|
||||
from ..utils import output, error, success, warning
|
||||
from utils import output, error, success, warning
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -5,7 +5,7 @@ import httpx
|
||||
import json
|
||||
import time
|
||||
from typing import Optional, Dict, Any, List
|
||||
from ..utils import output, error, success, warning
|
||||
from utils import output, error, success, warning
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -5,7 +5,7 @@ import json
|
||||
from pathlib import Path
|
||||
from typing import Optional, Dict, Any, List
|
||||
from datetime import datetime, timedelta
|
||||
from ..utils import output, error, success, warning
|
||||
from utils import output, error, success, warning
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -59,7 +59,7 @@ def dashboard(plugin_id, days, test_mode):
|
||||
def get_config():
|
||||
"""Get CLI configuration"""
|
||||
try:
|
||||
from .config import get_config
|
||||
from config import get_config
|
||||
return get_config()
|
||||
except ImportError:
|
||||
# Fallback for testing
|
||||
@@ -565,7 +565,7 @@ def download(plugin_id, license_key, test_mode):
|
||||
def get_config():
|
||||
"""Get CLI configuration"""
|
||||
try:
|
||||
from .config import get_config
|
||||
from config import get_config
|
||||
return get_config()
|
||||
except ImportError:
|
||||
# Fallback for testing
|
||||
@@ -489,7 +489,7 @@ def status(test_mode):
|
||||
def get_config():
|
||||
"""Get CLI configuration"""
|
||||
try:
|
||||
from .config import get_config
|
||||
from config import get_config
|
||||
return get_config()
|
||||
except ImportError:
|
||||
# Fallback for testing
|
||||
@@ -85,7 +85,7 @@ def status(test_mode):
|
||||
def get_config():
|
||||
"""Get CLI configuration"""
|
||||
try:
|
||||
from .config import get_config
|
||||
from config import get_config
|
||||
return get_config()
|
||||
except ImportError:
|
||||
# Fallback for testing
|
||||
@@ -9,7 +9,7 @@ import asyncio
|
||||
import json
|
||||
from typing import Optional, List, Dict, Any
|
||||
from datetime import datetime, timedelta
|
||||
from aitbc_cli.imports import ensure_coordinator_api_imports
|
||||
from imports import ensure_coordinator_api_imports
|
||||
|
||||
ensure_coordinator_api_imports()
|
||||
|
||||
@@ -6,7 +6,7 @@ import time
|
||||
import random
|
||||
from pathlib import Path
|
||||
from typing import Optional, List, Dict, Any
|
||||
from ..utils import output, error, success
|
||||
from utils import output, error, success
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -9,7 +9,7 @@ import asyncio
|
||||
import json
|
||||
from typing import Optional, List, Dict, Any
|
||||
from datetime import datetime, timedelta
|
||||
from aitbc_cli.imports import ensure_coordinator_api_imports
|
||||
from imports import ensure_coordinator_api_imports
|
||||
|
||||
ensure_coordinator_api_imports()
|
||||
|
||||
@@ -4,7 +4,7 @@ import click
|
||||
import httpx
|
||||
import json
|
||||
from typing import Optional, Dict, Any, List
|
||||
from ..utils import output, error, success, warning
|
||||
from utils import output, error, success, warning
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -6,7 +6,7 @@ from pathlib import Path
|
||||
|
||||
import click
|
||||
|
||||
from ..utils import success, error, run_subprocess
|
||||
from utils import success, error, run_subprocess
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -11,8 +11,8 @@ from pathlib import Path
|
||||
from typing import Dict, Any, Optional
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
from ..utils import output, success, error, warning
|
||||
from ..config import get_config
|
||||
from utils import output, success, error, warning
|
||||
from config import get_config
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -118,7 +118,7 @@ def api(ctx, endpoint, method, data):
|
||||
@click.pass_context
|
||||
def wallet(ctx, wallet_name, test_operations):
|
||||
"""Test wallet functionality"""
|
||||
from ..commands.wallet import wallet as wallet_cmd
|
||||
from commands.wallet import wallet as wallet_cmd
|
||||
|
||||
output(f"Testing wallet functionality with wallet: {wallet_name}")
|
||||
|
||||
@@ -164,7 +164,7 @@ def wallet(ctx, wallet_name, test_operations):
|
||||
@click.pass_context
|
||||
def job(ctx, job_type, test_data):
|
||||
"""Test job submission and management"""
|
||||
from ..commands.client import client as client_cmd
|
||||
from commands.client import client as client_cmd
|
||||
|
||||
output(f"Testing job submission with type: {job_type}")
|
||||
|
||||
@@ -220,7 +220,7 @@ def job(ctx, job_type, test_data):
|
||||
@click.pass_context
|
||||
def marketplace(ctx, gpu_type, price):
|
||||
"""Test marketplace functionality"""
|
||||
from ..commands.marketplace import marketplace as marketplace_cmd
|
||||
from commands.marketplace import marketplace as marketplace_cmd
|
||||
|
||||
output(f"Testing marketplace functionality for {gpu_type} at {price} AITBC/hour")
|
||||
|
||||
@@ -252,7 +252,7 @@ def marketplace(ctx, gpu_type, price):
|
||||
@click.pass_context
|
||||
def blockchain(ctx, test_endpoints):
|
||||
"""Test blockchain functionality"""
|
||||
from ..commands.blockchain import blockchain as blockchain_cmd
|
||||
from commands.blockchain import blockchain as blockchain_cmd
|
||||
|
||||
output("Testing blockchain functionality")
|
||||
|
||||
@@ -5,7 +5,7 @@ import json
|
||||
from pathlib import Path
|
||||
from typing import Optional, Dict, Any, List
|
||||
from datetime import datetime, timedelta
|
||||
from ..utils import output, error, success, warning
|
||||
from utils import output, error, success, warning
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -9,7 +9,7 @@ import yaml
|
||||
from pathlib import Path
|
||||
from typing import Optional, Dict, Any, List
|
||||
from datetime import datetime, timedelta
|
||||
from ..utils import output, error, success, encrypt_value, decrypt_value
|
||||
from utils import output, error, success, encrypt_value, decrypt_value
|
||||
import getpass
|
||||
|
||||
|
||||
@@ -124,8 +124,8 @@ def wallet(ctx, wallet_name: Optional[str], wallet_path: Optional[str], use_daem
|
||||
ctx.obj["use_daemon"] = use_daemon
|
||||
|
||||
# Initialize dual-mode adapter
|
||||
from ..config import get_config
|
||||
from ..dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
from config import get_config
|
||||
from dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
|
||||
config = get_config()
|
||||
adapter = DualModeWalletAdapter(config, use_daemon=use_daemon)
|
||||
@@ -188,8 +188,8 @@ def create(ctx, name: str, wallet_type: str, no_encrypt: bool):
|
||||
if use_daemon and not adapter.is_daemon_available():
|
||||
error("Wallet daemon is not available. Falling back to file-based wallet.")
|
||||
# Switch to file mode
|
||||
from ..config import get_config
|
||||
from ..dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
from config import get_config
|
||||
from dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
config = get_config()
|
||||
adapter = DualModeWalletAdapter(config, use_daemon=False)
|
||||
ctx.obj["wallet_adapter"] = adapter
|
||||
@@ -254,8 +254,8 @@ def list(ctx):
|
||||
if use_daemon and not adapter.is_daemon_available():
|
||||
error("Wallet daemon is not available. Falling back to file-based wallet listing.")
|
||||
# Switch to file mode
|
||||
from ..config import get_config
|
||||
from ..dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
from config import get_config
|
||||
from dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
config = get_config()
|
||||
adapter = DualModeWalletAdapter(config, use_daemon=False)
|
||||
|
||||
@@ -306,8 +306,8 @@ def switch(ctx, name: str):
|
||||
if use_daemon and not adapter.is_daemon_available():
|
||||
error("Wallet daemon is not available. Falling back to file-based wallet switching.")
|
||||
# Switch to file mode
|
||||
from ..config import get_config
|
||||
from ..dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
from config import get_config
|
||||
from dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
config = get_config()
|
||||
adapter = DualModeWalletAdapter(config, use_daemon=False)
|
||||
|
||||
@@ -846,8 +846,8 @@ def send(ctx, to_address: str, amount: float, description: Optional[str]):
|
||||
if use_daemon and not adapter.is_daemon_available():
|
||||
error("Wallet daemon is not available. Falling back to file-based wallet send.")
|
||||
# Switch to file mode
|
||||
from ..config import get_config
|
||||
from ..dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
from config import get_config
|
||||
from dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
config = get_config()
|
||||
adapter = DualModeWalletAdapter(config, use_daemon=False)
|
||||
ctx.obj["wallet_adapter"] = adapter
|
||||
@@ -882,8 +882,8 @@ def balance(ctx):
|
||||
if use_daemon and not adapter.is_daemon_available():
|
||||
error("Wallet daemon is not available. Falling back to file-based wallet balance.")
|
||||
# Switch to file mode
|
||||
from ..config import get_config
|
||||
from ..dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
from config import get_config
|
||||
from dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
config = get_config()
|
||||
adapter = DualModeWalletAdapter(config, use_daemon=False)
|
||||
ctx.obj["wallet_adapter"] = adapter
|
||||
@@ -919,8 +919,8 @@ def daemon():
|
||||
@click.pass_context
|
||||
def status(ctx):
|
||||
"""Check wallet daemon status"""
|
||||
from ..config import get_config
|
||||
from ..wallet_daemon_client import WalletDaemonClient
|
||||
from config import get_config
|
||||
from wallet_daemon_client import WalletDaemonClient
|
||||
|
||||
config = get_config()
|
||||
client = WalletDaemonClient(config)
|
||||
@@ -942,7 +942,7 @@ def status(ctx):
|
||||
@click.pass_context
|
||||
def configure(ctx):
|
||||
"""Configure wallet daemon settings"""
|
||||
from ..config import get_config
|
||||
from config import get_config
|
||||
|
||||
config = get_config()
|
||||
|
||||
@@ -961,8 +961,8 @@ def configure(ctx):
|
||||
@click.pass_context
|
||||
def migrate_to_daemon(ctx, wallet_name: str, password: Optional[str], new_password: Optional[str], force: bool):
|
||||
"""Migrate a file-based wallet to daemon storage"""
|
||||
from ..wallet_migration_service import WalletMigrationService
|
||||
from ..config import get_config
|
||||
from wallet_migration_service import WalletMigrationService
|
||||
from config import get_config
|
||||
|
||||
config = get_config()
|
||||
migration_service = WalletMigrationService(config)
|
||||
@@ -988,8 +988,8 @@ def migrate_to_daemon(ctx, wallet_name: str, password: Optional[str], new_passwo
|
||||
@click.pass_context
|
||||
def migrate_to_file(ctx, wallet_name: str, password: Optional[str], new_password: Optional[str], force: bool):
|
||||
"""Migrate a daemon-based wallet to file storage"""
|
||||
from ..wallet_migration_service import WalletMigrationService
|
||||
from ..config import get_config
|
||||
from wallet_migration_service import WalletMigrationService
|
||||
from config import get_config
|
||||
|
||||
config = get_config()
|
||||
migration_service = WalletMigrationService(config)
|
||||
@@ -1011,8 +1011,8 @@ def migrate_to_file(ctx, wallet_name: str, password: Optional[str], new_password
|
||||
@click.pass_context
|
||||
def migration_status(ctx):
|
||||
"""Show wallet migration status"""
|
||||
from ..wallet_migration_service import WalletMigrationService
|
||||
from ..config import get_config
|
||||
from wallet_migration_service import WalletMigrationService
|
||||
from config import get_config
|
||||
|
||||
config = get_config()
|
||||
migration_service = WalletMigrationService(config)
|
||||
@@ -1452,7 +1452,7 @@ def multisig_challenge(ctx, wallet_name: str, tx_id: str):
|
||||
return
|
||||
|
||||
# Import crypto utilities
|
||||
from ..utils.crypto_utils import multisig_security
|
||||
from utils.crypto_utils import multisig_security
|
||||
|
||||
try:
|
||||
# Create signing request
|
||||
@@ -1481,7 +1481,7 @@ def multisig_challenge(ctx, wallet_name: str, tx_id: str):
|
||||
@click.pass_context
|
||||
def sign_challenge(ctx, challenge: str, private_key: str):
|
||||
"""Sign a cryptographic challenge (for testing multisig)"""
|
||||
from ..utils.crypto_utils import sign_challenge
|
||||
from utils.crypto_utils import sign_challenge
|
||||
|
||||
try:
|
||||
signature = sign_challenge(challenge, private_key)
|
||||
@@ -1520,7 +1520,7 @@ def multisig_sign(ctx, wallet_name: str, tx_id: str, signer: str, signature: str
|
||||
return
|
||||
|
||||
# Import crypto utilities
|
||||
from ..utils.crypto_utils import multisig_security
|
||||
from utils.crypto_utils import multisig_security
|
||||
|
||||
# Verify signature cryptographically
|
||||
success, message = multisig_security.verify_and_add_signature(tx_id, signature, signer)
|
||||
@@ -2112,7 +2112,7 @@ def multisig_create(ctx, threshold: int, signers: tuple, wallet_name: Optional[s
|
||||
try:
|
||||
if ctx.obj.get("use_daemon"):
|
||||
# Use wallet daemon for multi-sig creation
|
||||
from ..dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
from dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
adapter = DualModeWalletAdapter(config)
|
||||
|
||||
result = adapter.create_multisig_wallet(
|
||||
@@ -2170,7 +2170,7 @@ def set_limit(ctx, amount: float, period: str, wallet_name: Optional[str]):
|
||||
try:
|
||||
if ctx.obj.get("use_daemon"):
|
||||
# Use wallet daemon
|
||||
from ..dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
from dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
adapter = DualModeWalletAdapter(config)
|
||||
|
||||
result = adapter.set_transfer_limit(
|
||||
@@ -2232,7 +2232,7 @@ def time_lock(ctx, amount: float, duration: int, recipient: str, wallet_name: Op
|
||||
try:
|
||||
if ctx.obj.get("use_daemon"):
|
||||
# Use wallet daemon
|
||||
from ..dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
from dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
adapter = DualModeWalletAdapter(config)
|
||||
|
||||
result = adapter.create_time_lock(
|
||||
@@ -2352,7 +2352,7 @@ def audit_trail(ctx, wallet_name: Optional[str], days: int):
|
||||
try:
|
||||
if ctx.obj.get("use_daemon"):
|
||||
# Use wallet daemon for audit
|
||||
from ..dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
from dual_mode_wallet_adapter import DualModeWalletAdapter
|
||||
adapter = DualModeWalletAdapter(config)
|
||||
|
||||
result = adapter.get_audit_trail(
|
||||
@@ -13,8 +13,8 @@ from enum import Enum
|
||||
import uuid
|
||||
from collections import defaultdict
|
||||
|
||||
from ..core.config import MultiChainConfig
|
||||
from ..core.node_client import NodeClient
|
||||
from core.config import MultiChainConfig
|
||||
from core.node_client import NodeClient
|
||||
|
||||
class MessageType(Enum):
|
||||
"""Agent message types"""
|
||||
@@ -11,9 +11,9 @@ from dataclasses import dataclass, asdict
|
||||
from collections import defaultdict, deque
|
||||
import statistics
|
||||
|
||||
from ..core.config import MultiChainConfig
|
||||
from ..core.node_client import NodeClient
|
||||
from ..models.chain import ChainInfo, ChainType, ChainStatus
|
||||
from core.config import MultiChainConfig
|
||||
from core.node_client import NodeClient
|
||||
from models.chain import ChainInfo, ChainType, ChainStatus
|
||||
|
||||
@dataclass
|
||||
class ChainMetrics:
|
||||
@@ -10,7 +10,7 @@ from pathlib import Path
|
||||
from typing import Dict, List, Optional, Any
|
||||
from .config import MultiChainConfig, get_node_config
|
||||
from .node_client import NodeClient
|
||||
from ..models.chain import (
|
||||
from models.chain import (
|
||||
ChainConfig, ChainInfo, ChainType, ChainStatus,
|
||||
GenesisBlock, ChainMigrationPlan, ChainMigrationResult,
|
||||
ChainBackupResult, ChainRestoreResult
|
||||
@@ -8,8 +8,8 @@ import yaml
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any, Optional
|
||||
from ..core.config import MultiChainConfig
|
||||
from ..models.chain import GenesisBlock, GenesisConfig, ChainType, ConsensusAlgorithm
|
||||
from core.config import MultiChainConfig
|
||||
from models.chain import GenesisBlock, GenesisConfig, ChainType, ConsensusAlgorithm
|
||||
|
||||
class GenesisValidationError(Exception):
|
||||
"""Genesis validation error"""
|
||||
@@ -14,8 +14,8 @@ import uuid
|
||||
from decimal import Decimal
|
||||
from collections import defaultdict
|
||||
|
||||
from ..core.config import MultiChainConfig
|
||||
from ..core.node_client import NodeClient
|
||||
from core.config import MultiChainConfig
|
||||
from core.node_client import NodeClient
|
||||
|
||||
class ChainType(Enum):
|
||||
"""Chain types in marketplace"""
|
||||
@@ -6,8 +6,8 @@ import asyncio
|
||||
import httpx
|
||||
import json
|
||||
from typing import Dict, List, Optional, Any
|
||||
from ..core.config import NodeConfig
|
||||
from ..models.chain import ChainInfo, ChainType, ChainStatus, ConsensusAlgorithm
|
||||
from core.config import NodeConfig
|
||||
from models.chain import ChainInfo, ChainType, ChainStatus, ConsensusAlgorithm
|
||||
|
||||
class NodeClient:
|
||||
"""Client for communicating with AITBC nodes"""
|
||||
@@ -244,7 +244,7 @@ class NodeClient:
|
||||
def _parse_chain_info(self, chain_data: Dict[str, Any]) -> ChainInfo:
|
||||
"""Parse chain data from node response"""
|
||||
from datetime import datetime
|
||||
from ..models.chain import PrivacyConfig
|
||||
from models.chain import PrivacyConfig
|
||||
|
||||
return ChainInfo(
|
||||
id=chain_data.get("chain_id", chain_data.get("id", "unknown")),
|
||||
@@ -297,7 +297,7 @@ class NodeClient:
|
||||
def _get_mock_chains(self) -> List[ChainInfo]:
|
||||
"""Get mock chains for development"""
|
||||
from datetime import datetime
|
||||
from ..models.chain import PrivacyConfig
|
||||
from models.chain import PrivacyConfig
|
||||
|
||||
return [
|
||||
ChainInfo(
|
||||
@@ -7,8 +7,8 @@ import click
|
||||
import sys
|
||||
from typing import Optional
|
||||
|
||||
from . import __version__
|
||||
from .config import get_config
|
||||
from __init__ import __version__
|
||||
from config import get_config
|
||||
|
||||
|
||||
def with_role(role: str):
|
||||
@@ -20,58 +20,58 @@ def with_role(role: str):
|
||||
return func(ctx, *args, **kwargs)
|
||||
return wrapper
|
||||
return decorator
|
||||
from .utils import output, setup_logging
|
||||
from .commands.client import client
|
||||
from .commands.miner import miner
|
||||
from .commands.wallet import wallet
|
||||
from .commands.auth import auth
|
||||
from .commands.blockchain import blockchain
|
||||
from .commands.marketplace import marketplace
|
||||
from .commands.simulate import simulate
|
||||
from .commands.admin import admin
|
||||
from .commands.config import config
|
||||
from .commands.monitor import monitor
|
||||
from .commands.governance import governance
|
||||
from .commands.exchange import exchange
|
||||
from .commands.oracle import oracle
|
||||
from .commands.market_maker import market_maker
|
||||
from .commands.multisig import multisig
|
||||
from .commands.genesis_protection import genesis_protection
|
||||
from .commands.transfer_control import transfer_control
|
||||
from .commands.agent import agent
|
||||
from .commands.multimodal import multimodal
|
||||
from .commands.optimize import optimize
|
||||
# from .commands.openclaw import openclaw # Temporarily disabled due to naming conflict
|
||||
from .commands.marketplace_advanced import advanced # Re-enabled after fixing registration issues
|
||||
from .commands.swarm import swarm
|
||||
from .commands.chain import chain
|
||||
from .commands.genesis import genesis
|
||||
from .commands.keystore import keystore
|
||||
from .commands.test_cli import test
|
||||
from .commands.node import node
|
||||
from .commands.analytics import analytics
|
||||
from .commands.agent_comm import agent_comm
|
||||
from .commands.deployment import deploy
|
||||
from .commands.cross_chain import cross_chain
|
||||
from .commands.compliance import compliance
|
||||
from .commands.surveillance import surveillance
|
||||
from .commands.regulatory import regulatory
|
||||
from .commands.ai_trading import ai_trading
|
||||
from .commands.advanced_analytics import advanced_analytics_group
|
||||
from .commands.ai_surveillance import ai_surveillance_group
|
||||
from utils import output, setup_logging
|
||||
from commands.client import client
|
||||
from commands.miner import miner
|
||||
from commands.wallet import wallet
|
||||
from commands.auth import auth
|
||||
from commands.blockchain import blockchain
|
||||
from commands.marketplace import marketplace
|
||||
from commands.simulate import simulate
|
||||
from commands.admin import admin
|
||||
from commands.config import config
|
||||
from commands.monitor import monitor
|
||||
from commands.governance import governance
|
||||
from commands.exchange import exchange
|
||||
from commands.oracle import oracle
|
||||
from commands.market_maker import market_maker
|
||||
from commands.multisig import multisig
|
||||
from commands.genesis_protection import genesis_protection
|
||||
from commands.transfer_control import transfer_control
|
||||
from commands.agent import agent
|
||||
from commands.multimodal import multimodal
|
||||
from commands.optimize import optimize
|
||||
# from commands.openclaw import openclaw # Temporarily disabled due to naming conflict
|
||||
from commands.marketplace_advanced import advanced # Re-enabled after fixing registration issues
|
||||
from commands.swarm import swarm
|
||||
from commands.chain import chain
|
||||
from commands.genesis import genesis
|
||||
from commands.keystore import keystore
|
||||
from commands.test_cli import test
|
||||
from commands.node import node
|
||||
from commands.analytics import analytics
|
||||
from commands.agent_comm import agent_comm
|
||||
from commands.deployment import deploy
|
||||
from commands.cross_chain import cross_chain
|
||||
from commands.compliance import compliance
|
||||
from commands.surveillance import surveillance
|
||||
from commands.regulatory import regulatory
|
||||
from commands.ai_trading import ai_trading
|
||||
from commands.advanced_analytics import advanced_analytics_group
|
||||
from commands.ai_surveillance import ai_surveillance_group
|
||||
|
||||
# AI provider commands
|
||||
from .commands.ai import ai_group
|
||||
from commands.ai import ai_group
|
||||
|
||||
# Enterprise integration (optional)
|
||||
try:
|
||||
from .commands.enterprise_integration import enterprise_integration_group
|
||||
from commands.enterprise_integration import enterprise_integration_group
|
||||
except ImportError:
|
||||
enterprise_integration_group = None
|
||||
|
||||
from .commands.sync import sync
|
||||
from .commands.explorer import explorer
|
||||
from .plugins import plugin, load_plugins
|
||||
from commands.sync import sync
|
||||
from commands.explorer import explorer
|
||||
from plugins import plugin, load_plugins
|
||||
|
||||
|
||||
@click.group()
|
||||
@@ -57,12 +57,12 @@ setup(
|
||||
},
|
||||
entry_points={
|
||||
"console_scripts": [
|
||||
"aitbc=aitbc_cli.main:main",
|
||||
"aitbc=main:main",
|
||||
],
|
||||
},
|
||||
include_package_data=True,
|
||||
package_data={
|
||||
"aitbc_cli": ["*.yaml", "*.yml", "*.json"],
|
||||
"": ["*.yaml", "*.yml", "*.json"],
|
||||
},
|
||||
zip_safe=False,
|
||||
)
|
||||
|
||||
Reference in New Issue
Block a user