chore: remove configuration files and enhance blockchain explorer with advanced search, analytics, and export features

- Delete .aitbc.yaml.example CLI configuration template
- Delete .lycheeignore link checker exclusion rules
- Delete .nvmrc Node.js version specification
- Add advanced search panel with filters for address, amount range, transaction type, time range, and validator
- Add analytics dashboard with transaction volume, active addresses, and block time metrics
- Add Chart.js integration
This commit is contained in:
oib
2026-03-02 15:38:25 +01:00
parent af185cdd8b
commit ccedbace53
271 changed files with 35942 additions and 2359 deletions

View File

@@ -22,7 +22,7 @@ def status(ctx):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/admin/status",
f"{config.coordinator_url}/admin/status",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -52,7 +52,7 @@ def jobs(ctx, limit: int, status: Optional[str]):
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/admin/jobs",
f"{config.coordinator_url}/admin/jobs",
params=params,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -77,7 +77,7 @@ def job_details(ctx, job_id: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/admin/jobs/{job_id}",
f"{config.coordinator_url}/admin/jobs/{job_id}",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -104,7 +104,7 @@ def delete_job(ctx, job_id: str):
try:
with httpx.Client() as client:
response = client.delete(
f"{config.coordinator_url}/v1/admin/jobs/{job_id}",
f"{config.coordinator_url}/admin/jobs/{job_id}",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -133,7 +133,7 @@ def miners(ctx, limit: int, status: Optional[str]):
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/admin/miners",
f"{config.coordinator_url}/admin/miners",
params=params,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -158,7 +158,7 @@ def miner_details(ctx, miner_id: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/admin/miners/{miner_id}",
f"{config.coordinator_url}/admin/miners/{miner_id}",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -185,7 +185,7 @@ def deactivate_miner(ctx, miner_id: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/admin/miners/{miner_id}/deactivate",
f"{config.coordinator_url}/admin/miners/{miner_id}/deactivate",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -209,7 +209,7 @@ def activate_miner(ctx, miner_id: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/admin/miners/{miner_id}/activate",
f"{config.coordinator_url}/admin/miners/{miner_id}/activate",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -233,7 +233,7 @@ def analytics(ctx, days: int):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/admin/analytics",
f"{config.coordinator_url}/admin/analytics",
params={"days": days},
headers={"X-Api-Key": config.api_key or ""}
)
@@ -259,7 +259,7 @@ def logs(ctx, level: str, limit: int):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/admin/logs",
f"{config.coordinator_url}/admin/logs",
params={"level": level, "limit": limit},
headers={"X-Api-Key": config.api_key or ""}
)
@@ -285,7 +285,7 @@ def prioritize_job(ctx, job_id: str, reason: Optional[str]):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/admin/jobs/{job_id}/prioritize",
f"{config.coordinator_url}/admin/jobs/{job_id}/prioritize",
json={"reason": reason or "Admin priority"},
headers={"X-Api-Key": config.api_key or ""}
)
@@ -324,7 +324,7 @@ def execute(ctx, action: str, target: Optional[str], data: Optional[str]):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/admin/execute/{action}",
f"{config.coordinator_url}/admin/execute/{action}",
json=parsed_data,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -357,7 +357,7 @@ def cleanup(ctx):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/admin/maintenance/cleanup",
f"{config.coordinator_url}/admin/maintenance/cleanup",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -384,7 +384,7 @@ def reindex(ctx):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/admin/maintenance/reindex",
f"{config.coordinator_url}/admin/maintenance/reindex",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -408,7 +408,7 @@ def backup(ctx):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/admin/maintenance/backup",
f"{config.coordinator_url}/admin/maintenance/backup",
headers={"X-Api-Key": config.api_key or ""}
)

View File

@@ -50,7 +50,7 @@ def create(ctx, name: str, description: str, workflow_file, verification: str,
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/agents/workflows",
f"{config.coordinator_url}/agents/workflows",
headers={"X-Api-Key": config.api_key or ""},
json=workflow_data
)
@@ -94,7 +94,7 @@ def list(ctx, agent_type: Optional[str], status: Optional[str],
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/agents/workflows",
f"{config.coordinator_url}/agents/workflows",
headers={"X-Api-Key": config.api_key or ""},
params=params
)
@@ -141,7 +141,7 @@ def execute(ctx, agent_id: str, inputs, verification: str, priority: str, timeou
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/agents/{agent_id}/execute",
f"{config.coordinator_url}/agents/{agent_id}/execute",
headers={"X-Api-Key": config.api_key or ""},
json=execution_data
)
@@ -173,7 +173,7 @@ def status(ctx, execution_id: str, watch: bool, interval: int):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/agents/executions/{execution_id}",
f"{config.coordinator_url}/agents/executions/{execution_id}",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -219,7 +219,7 @@ def receipt(ctx, execution_id: str, verify: bool, download: Optional[str]):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/agents/executions/{execution_id}/receipt",
f"{config.coordinator_url}/agents/executions/{execution_id}/receipt",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -229,7 +229,7 @@ def receipt(ctx, execution_id: str, verify: bool, download: Optional[str]):
if verify:
# Verify receipt
verify_response = client.post(
f"{config.coordinator_url}/v1/agents/receipts/verify",
f"{config.coordinator_url}/agents/receipts/verify",
headers={"X-Api-Key": config.api_key or ""},
json={"receipt": receipt_data}
)
@@ -265,7 +265,7 @@ def network():
pass
@agent.add_command(network)
agent.add_command(network)
@network.command()
@@ -292,7 +292,7 @@ def create(ctx, name: str, agents: str, description: str, coordination: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/agents/networks",
f"{config.coordinator_url}/agents/networks",
headers={"X-Api-Key": config.api_key or ""},
json=network_data
)
@@ -335,7 +335,7 @@ def execute(ctx, network_id: str, task, priority: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/agents/networks/{network_id}/execute",
f"{config.coordinator_url}/agents/networks/{network_id}/execute",
headers={"X-Api-Key": config.api_key or ""},
json=execution_data
)
@@ -370,7 +370,7 @@ def status(ctx, network_id: str, metrics: str, real_time: bool):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/agents/networks/{network_id}/status",
f"{config.coordinator_url}/agents/networks/{network_id}/status",
headers={"X-Api-Key": config.api_key or ""},
params=params
)
@@ -401,7 +401,7 @@ def optimize(ctx, network_id: str, objective: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/agents/networks/{network_id}/optimize",
f"{config.coordinator_url}/agents/networks/{network_id}/optimize",
headers={"X-Api-Key": config.api_key or ""},
json=optimization_data
)
@@ -426,7 +426,7 @@ def learning():
pass
@agent.add_command(learning)
agent.add_command(learning)
@learning.command()
@@ -452,7 +452,7 @@ def enable(ctx, agent_id: str, mode: str, feedback_source: Optional[str], learni
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/agents/{agent_id}/learning/enable",
f"{config.coordinator_url}/agents/{agent_id}/learning/enable",
headers={"X-Api-Key": config.api_key or ""},
json=learning_config
)
@@ -494,7 +494,7 @@ def train(ctx, agent_id: str, feedback, epochs: int):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/agents/{agent_id}/learning/train",
f"{config.coordinator_url}/agents/{agent_id}/learning/train",
headers={"X-Api-Key": config.api_key or ""},
json=training_data
)
@@ -526,7 +526,7 @@ def progress(ctx, agent_id: str, metrics: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/agents/{agent_id}/learning/progress",
f"{config.coordinator_url}/agents/{agent_id}/learning/progress",
headers={"X-Api-Key": config.api_key or ""},
params=params
)
@@ -557,7 +557,7 @@ def export(ctx, agent_id: str, format: str, output: Optional[str]):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/agents/{agent_id}/learning/export",
f"{config.coordinator_url}/agents/{agent_id}/learning/export",
headers={"X-Api-Key": config.api_key or ""},
params=params
)
@@ -605,7 +605,7 @@ def submit_contribution(ctx, type: str, description: str, github_repo: str, bran
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/agents/contributions",
f"{config.coordinator_url}/agents/contributions",
headers={"X-Api-Key": config.api_key or ""},
json=contribution_data
)

View File

@@ -0,0 +1,496 @@
"""Cross-chain agent communication commands for AITBC CLI"""
import click
import asyncio
import json
from datetime import datetime, timedelta
from typing import Optional
from ..core.config import load_multichain_config
from ..core.agent_communication import (
CrossChainAgentCommunication, AgentInfo, AgentMessage,
MessageType, AgentStatus
)
from ..utils import output, error, success
# Top-level Click group: every cross-chain agent communication subcommand
# below (register, list, discover, send, collaborate, reputation, status,
# network, monitor) attaches to this group.
@click.group()
def agent_comm():
    """Cross-chain agent communication commands"""
    pass
@agent_comm.command()
@click.argument('agent_id')
@click.argument('name')
@click.argument('chain_id')
@click.argument('endpoint')
@click.option('--capabilities', help='Comma-separated list of capabilities')
@click.option('--reputation', default=0.5, help='Initial reputation score')
@click.option('--version', default='1.0.0', help='Agent version')
@click.pass_context
def register(ctx, agent_id, name, chain_id, endpoint, capabilities, reputation, version):
    """Register an agent in the cross-chain network"""
    # Builds an AgentInfo record from the CLI arguments, registers it via the
    # communication layer, and echoes the registered record on success.
    try:
        config = load_multichain_config()
        comm = CrossChainAgentCommunication(config)
        # Parse capabilities (empty list when the option is omitted)
        cap_list = capabilities.split(',') if capabilities else []
        # Create agent info
        agent_info = AgentInfo(
            agent_id=agent_id,
            name=name,
            chain_id=chain_id,
            node_id="default-node",  # Would be determined dynamically
            status=AgentStatus.ACTIVE,
            capabilities=cap_list,
            reputation_score=reputation,
            last_seen=datetime.now(),
            endpoint=endpoint,
            version=version
        )
        # Register agent.
        # BUG FIX: the result used to be bound to a local named `success`,
        # shadowing the imported success() helper so the call below raised
        # "'bool' object is not callable". Bind to a distinct name instead.
        registered = asyncio.run(comm.register_agent(agent_info))
        if registered:
            success(f"Agent {agent_id} registered successfully!")
            agent_data = {
                "Agent ID": agent_id,
                "Name": name,
                "Chain ID": chain_id,
                "Status": "active",
                "Capabilities": ", ".join(cap_list),
                "Reputation": f"{reputation:.2f}",
                "Endpoint": endpoint,
                "Version": version
            }
            output(agent_data, ctx.obj.get('output_format', 'table'))
        else:
            error(f"Failed to register agent {agent_id}")
            raise click.Abort()
    except click.Abort:
        # Propagate the deliberate abort from the failure branch instead of
        # wrapping it in the generic error message below.
        raise
    except Exception as e:
        error(f"Error registering agent: {str(e)}")
        raise click.Abort()
@agent_comm.command()
@click.option('--chain-id', help='Filter by chain ID')
@click.option('--status', type=click.Choice(['active', 'inactive', 'busy', 'offline']), help='Filter by status')
@click.option('--capabilities', help='Filter by capabilities (comma-separated)')
@click.option('--format', type=click.Choice(['table', 'json']), default='table', help='Output format')
@click.pass_context
def list(ctx, chain_id, status, capabilities, format):
    """List registered agents"""
    # Lists every agent known to the communication layer, with optional
    # chain / status / capability filters applied in sequence.
    try:
        config = load_multichain_config()
        comm = CrossChainAgentCommunication(config)
        # Get all agents.
        # BUG FIX: this function rebinds the module-level name `list` to a
        # click Command, so calling `list(...)` here invoked the CLI command
        # instead of the builtin. Build the list with unpacking instead.
        agents = [*comm.agents.values()]
        # Apply filters
        if chain_id:
            agents = [a for a in agents if a.chain_id == chain_id]
        if status:
            agents = [a for a in agents if a.status.value == status]
        if capabilities:
            # An agent matches when it has ANY of the requested capabilities.
            required_caps = [cap.strip() for cap in capabilities.split(',')]
            agents = [a for a in agents if any(cap in a.capabilities for cap in required_caps)]
        if not agents:
            output("No agents found", ctx.obj.get('output_format', 'table'))
            return
        # Format output
        agent_data = [
            {
                "Agent ID": agent.agent_id,
                "Name": agent.name,
                "Chain ID": agent.chain_id,
                "Status": agent.status.value,
                "Reputation": f"{agent.reputation_score:.2f}",
                "Capabilities": ", ".join(agent.capabilities[:3]),  # Show first 3
                "Last Seen": agent.last_seen.strftime("%Y-%m-%d %H:%M:%S")
            }
            for agent in agents
        ]
        output(agent_data, ctx.obj.get('output_format', format), title="Registered Agents")
    except Exception as e:
        error(f"Error listing agents: {str(e)}")
        raise click.Abort()
@agent_comm.command()
@click.argument('chain_id')
@click.option('--capabilities', help='Required capabilities (comma-separated)')
@click.option('--format', type=click.Choice(['table', 'json']), default='table', help='Output format')
@click.pass_context
def discover(ctx, chain_id, capabilities, format):
    """Discover agents on a specific chain"""
    try:
        cfg = load_multichain_config()
        channel = CrossChainAgentCommunication(cfg)
        # Optional capability filter; None means "no filtering".
        wanted = capabilities.split(',') if capabilities else None
        found = asyncio.run(channel.discover_agents(chain_id, wanted))
        if not found:
            output(f"No agents found on chain {chain_id}", ctx.obj.get('output_format', 'table'))
            return
        # One display row per discovered agent.
        rows = []
        for entry in found:
            rows.append({
                "Agent ID": entry.agent_id,
                "Name": entry.name,
                "Status": entry.status.value,
                "Reputation": f"{entry.reputation_score:.2f}",
                "Capabilities": ", ".join(entry.capabilities),
                "Endpoint": entry.endpoint,
                "Version": entry.version
            })
        output(rows, ctx.obj.get('output_format', format), title=f"Agents on Chain {chain_id}")
    except Exception as e:
        error(f"Error discovering agents: {str(e)}")
        raise click.Abort()
@agent_comm.command()
@click.argument('sender_id')
@click.argument('receiver_id')
@click.argument('message_type')
@click.argument('chain_id')
@click.option('--payload', help='Message payload (JSON string)')
@click.option('--target-chain', help='Target chain for cross-chain messages')
@click.option('--priority', default=5, help='Message priority (1-10)')
@click.option('--ttl', default=3600, help='Time to live in seconds')
@click.pass_context
def send(ctx, sender_id, receiver_id, message_type, chain_id, payload, target_chain, priority, ttl):
    """Send a message to an agent"""
    # Validates the message type and JSON payload, builds an AgentMessage,
    # and dispatches it through the communication layer.
    try:
        config = load_multichain_config()
        comm = CrossChainAgentCommunication(config)
        # Parse message type
        try:
            msg_type = MessageType(message_type)
        except ValueError:
            error(f"Invalid message type: {message_type}")
            error(f"Valid types: {[t.value for t in MessageType]}")
            raise click.Abort()
        # Parse payload
        payload_dict = {}
        if payload:
            try:
                payload_dict = json.loads(payload)
            except json.JSONDecodeError:
                error("Invalid JSON payload")
                raise click.Abort()
        # Create message
        message = AgentMessage(
            message_id=f"msg_{datetime.now().strftime('%Y%m%d%H%M%S')}_{sender_id}",
            sender_id=sender_id,
            receiver_id=receiver_id,
            message_type=msg_type,
            chain_id=chain_id,
            target_chain_id=target_chain,
            payload=payload_dict,
            timestamp=datetime.now(),
            signature="auto_generated",  # Would be cryptographically signed
            priority=priority,
            ttl_seconds=ttl
        )
        # Send message.
        # BUG FIX: the result used to be bound to a local named `success`,
        # shadowing the imported success() helper so the call below raised
        # "'bool' object is not callable". Bind to a distinct name instead.
        sent = asyncio.run(comm.send_message(message))
        if sent:
            success(f"Message sent successfully to {receiver_id}")
            message_data = {
                "Message ID": message.message_id,
                "Sender": sender_id,
                "Receiver": receiver_id,
                "Type": message_type,
                "Chain": chain_id,
                "Target Chain": target_chain or "Same",
                "Priority": priority,
                "TTL": f"{ttl}s",
                "Sent": message.timestamp.strftime("%Y-%m-%d %H:%M:%S")
            }
            output(message_data, ctx.obj.get('output_format', 'table'))
        else:
            error(f"Failed to send message to {receiver_id}")
            raise click.Abort()
    except click.Abort:
        # Propagate aborts from the validation branches unchanged instead of
        # wrapping them in the generic error message below.
        raise
    except Exception as e:
        error(f"Error sending message: {str(e)}")
        raise click.Abort()
@agent_comm.command()
@click.argument('agent_ids', nargs=-1, required=True)
@click.argument('collaboration_type')
@click.option('--governance', help='Governance rules (JSON string)')
@click.pass_context
def collaborate(ctx, agent_ids, collaboration_type, governance):
    """Create a multi-agent collaboration"""
    # Creates a collaboration over the given agent IDs with optional
    # JSON-encoded governance rules.
    try:
        config = load_multichain_config()
        comm = CrossChainAgentCommunication(config)
        # Parse governance rules
        governance_dict = {}
        if governance:
            try:
                governance_dict = json.loads(governance)
            except json.JSONDecodeError:
                error("Invalid JSON governance rules")
                raise click.Abort()
        # Create collaboration.
        # BUG FIX: this module's `list` CLI command shadows the builtin
        # `list`, so `list(agent_ids)` would have invoked the click Command.
        # Convert the argument tuple with unpacking instead.
        collaboration_id = asyncio.run(comm.create_collaboration(
            [*agent_ids], collaboration_type, governance_dict
        ))
        if collaboration_id:
            success(f"Collaboration created: {collaboration_id}")
            collab_data = {
                "Collaboration ID": collaboration_id,
                "Type": collaboration_type,
                "Participants": ", ".join(agent_ids),
                "Status": "active",
                "Created": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            }
            output(collab_data, ctx.obj.get('output_format', 'table'))
        else:
            error("Failed to create collaboration")
            raise click.Abort()
    except click.Abort:
        # Propagate deliberate aborts unchanged.
        raise
    except Exception as e:
        error(f"Error creating collaboration: {str(e)}")
        raise click.Abort()
@agent_comm.command()
@click.argument('agent_id')
@click.argument('interaction_result', type=click.Choice(['success', 'failure']))
@click.option('--feedback', type=float, help='Feedback score (0.0-1.0)')
@click.pass_context
def reputation(ctx, agent_id, interaction_result, feedback):
    """Update agent reputation"""
    # Records a success/failure interaction (with optional feedback score)
    # and echoes the agent's updated reputation record when available.
    try:
        config = load_multichain_config()
        comm = CrossChainAgentCommunication(config)
        # Update reputation.
        # BUG FIX: the result used to be bound to a local named `success`,
        # shadowing the imported success() helper so the calls below raised
        # "'bool' object is not callable". Bind to a distinct name instead.
        updated = asyncio.run(comm.update_reputation(
            agent_id, interaction_result == 'success', feedback
        ))
        if updated:
            # Get updated reputation
            agent_status = asyncio.run(comm.get_agent_status(agent_id))
            if agent_status and agent_status.get('reputation'):
                rep = agent_status['reputation']
                success(f"Reputation updated for {agent_id}")
                rep_data = {
                    "Agent ID": agent_id,
                    "Reputation Score": f"{rep['reputation_score']:.3f}",
                    "Total Interactions": rep['total_interactions'],
                    "Successful": rep['successful_interactions'],
                    "Failed": rep['failed_interactions'],
                    "Success Rate": f"{(rep['successful_interactions'] / rep['total_interactions'] * 100):.1f}%" if rep['total_interactions'] > 0 else "N/A",
                    "Last Updated": rep['last_updated']
                }
                output(rep_data, ctx.obj.get('output_format', 'table'))
            else:
                success(f"Reputation updated for {agent_id}")
        else:
            error(f"Failed to update reputation for {agent_id}")
            raise click.Abort()
    except click.Abort:
        # Propagate the deliberate abort unchanged.
        raise
    except Exception as e:
        error(f"Error updating reputation: {str(e)}")
        raise click.Abort()
@agent_comm.command()
@click.argument('agent_id')
@click.option('--format', type=click.Choice(['table', 'json']), default='table', help='Output format')
@click.pass_context
def status(ctx, agent_id, format):
    """Get detailed agent status"""
    try:
        cfg = load_multichain_config()
        channel = CrossChainAgentCommunication(cfg)
        info = asyncio.run(channel.get_agent_status(agent_id))
        if not info:
            error(f"Agent {agent_id} not found")
            raise click.Abort()
        record = info["agent_info"]
        # The score is only formatted when a reputation record exists.
        rep_value = f"{record['reputation_score']:.3f}" if info.get('reputation') else "N/A"
        pairs = [
            ("Agent ID", record["agent_id"]),
            ("Name", record["name"]),
            ("Chain ID", record["chain_id"]),
            ("Status", info["status"]),
            ("Reputation", rep_value),
            ("Capabilities", ", ".join(record["capabilities"])),
            ("Message Queue Size", info["message_queue_size"]),
            ("Active Collaborations", info["active_collaborations"]),
            ("Last Seen", info["last_seen"]),
            ("Endpoint", record["endpoint"]),
            ("Version", record["version"]),
        ]
        status_data = [{"Metric": metric, "Value": value} for metric, value in pairs]
        output(status_data, ctx.obj.get('output_format', format), title=f"Agent Status: {agent_id}")
    except Exception as e:
        error(f"Error getting agent status: {str(e)}")
        raise click.Abort()
@agent_comm.command()
@click.option('--format', type=click.Choice(['table', 'json']), default='table', help='Output format')
@click.pass_context
def network(ctx, format):
    """Get cross-chain network overview"""
    try:
        cfg = load_multichain_config()
        channel = CrossChainAgentCommunication(cfg)
        stats = asyncio.run(channel.get_network_overview())
        if not stats:
            error("No network data available")
            raise click.Abort()
        fmt = ctx.obj.get('output_format', format)
        # Headline network-wide metrics.
        headline = [
            ("Total Agents", stats["total_agents"]),
            ("Active Agents", stats["active_agents"]),
            ("Total Collaborations", stats["total_collaborations"]),
            ("Active Collaborations", stats["active_collaborations"]),
            ("Total Messages", stats["total_messages"]),
            ("Queued Messages", stats["queued_messages"]),
            ("Average Reputation", f"{stats['average_reputation']:.3f}"),
            ("Routing Table Size", stats["routing_table_size"]),
            ("Discovery Cache Size", stats["discovery_cache_size"]),
        ]
        output([{"Metric": m, "Value": v} for m, v in headline], fmt, title="Network Overview")
        # Per-chain agent counts (section skipped when empty).
        if stats["agents_by_chain"]:
            per_chain = []
            for cid, total in stats["agents_by_chain"].items():
                per_chain.append({
                    "Chain ID": cid,
                    "Total Agents": total,
                    "Active Agents": stats["active_agents_by_chain"].get(cid, 0)
                })
            output(per_chain, fmt, title="Agents by Chain")
        # Collaboration counts grouped by type (section skipped when empty).
        if stats["collaborations_by_type"]:
            by_type = [{"Type": t, "Count": c} for t, c in stats["collaborations_by_type"].items()]
            output(by_type, fmt, title="Collaborations by Type")
    except Exception as e:
        error(f"Error getting network overview: {str(e)}")
        raise click.Abort()
@agent_comm.command()
@click.option('--realtime', is_flag=True, help='Real-time monitoring')
@click.option('--interval', default=10, help='Update interval in seconds')
@click.pass_context
def monitor(ctx, realtime, interval):
    """Monitor cross-chain agent communication"""
    # With --realtime, renders a live-updating rich table until Ctrl-C;
    # otherwise prints a single snapshot of the network overview.
    try:
        config = load_multichain_config()
        comm = CrossChainAgentCommunication(config)
        if realtime:
            # Real-time monitoring
            # Imported lazily so rich is only required for the live view.
            from rich.console import Console
            from rich.live import Live
            from rich.table import Table
            import time
            console = Console()
            def generate_monitor_table():
                # Build one refreshed table from the latest network overview.
                try:
                    overview = asyncio.run(comm.get_network_overview())
                    table = Table(title=f"Agent Network Monitor - {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
                    table.add_column("Metric", style="cyan")
                    table.add_column("Value", style="green")
                    table.add_row("Total Agents", str(overview["total_agents"]))
                    table.add_row("Active Agents", str(overview["active_agents"]))
                    table.add_row("Active Collaborations", str(overview["active_collaborations"]))
                    table.add_row("Queued Messages", str(overview["queued_messages"]))
                    table.add_row("Avg Reputation", f"{overview['average_reputation']:.3f}")
                    # Add top chains by agent count
                    if overview["agents_by_chain"]:
                        table.add_row("", "")
                        table.add_row("Top Chains by Agents", "")
                        # Top three chains, sorted by total agent count descending.
                        for chain_id, count in sorted(overview["agents_by_chain"].items(), key=lambda x: x[1], reverse=True)[:3]:
                            active = overview["active_agents_by_chain"].get(chain_id, 0)
                            table.add_row(f" {chain_id}", f"{count} total, {active} active")
                    return table
                except Exception as e:
                    # Render the error text in place of the table on failure.
                    return f"Error getting network data: {e}"
            with Live(generate_monitor_table(), refresh_per_second=1) as live:
                try:
                    while True:
                        live.update(generate_monitor_table())
                        time.sleep(interval)
                except KeyboardInterrupt:
                    console.print("\n[yellow]Monitoring stopped by user[/yellow]")
        else:
            # Single snapshot
            overview = asyncio.run(comm.get_network_overview())
            monitor_data = [
                {"Metric": "Total Agents", "Value": overview["total_agents"]},
                {"Metric": "Active Agents", "Value": overview["active_agents"]},
                {"Metric": "Total Collaborations", "Value": overview["total_collaborations"]},
                {"Metric": "Active Collaborations", "Value": overview["active_collaborations"]},
                {"Metric": "Total Messages", "Value": overview["total_messages"]},
                {"Metric": "Queued Messages", "Value": overview["queued_messages"]},
                {"Metric": "Average Reputation", "Value": f"{overview['average_reputation']:.3f}"},
                {"Metric": "Routing Table Size", "Value": overview["routing_table_size"]}
            ]
            output(monitor_data, ctx.obj.get('output_format', 'table'), title="Agent Network Monitor")
    except Exception as e:
        error(f"Error during monitoring: {str(e)}")
        raise click.Abort()

View File

@@ -0,0 +1,402 @@
"""Analytics and monitoring commands for AITBC CLI"""
import click
import asyncio
from datetime import datetime, timedelta
from typing import Optional
from ..core.config import load_multichain_config
from ..core.analytics import ChainAnalytics
from ..utils import output, error, success
# Top-level Click group; the summary/monitor/predict subcommands below
# attach to it.
@click.group()
def analytics():
    """Chain analytics and monitoring commands"""
    pass
@analytics.command()
@click.option('--chain-id', help='Specific chain ID to analyze')
@click.option('--hours', default=24, help='Time range in hours')
@click.option('--format', type=click.Choice(['table', 'json']), default='table', help='Output format')
@click.pass_context
def summary(ctx, chain_id, hours, format):
    """Get performance summary for chains"""
    # With --chain-id, reports one chain's performance summary; otherwise
    # reports a cross-chain overview plus a per-chain comparison table.
    try:
        config = load_multichain_config()
        # NOTE: this local deliberately shadows the module-level `analytics`
        # click group within this function body.
        analytics = ChainAnalytics(config)
        if chain_id:
            # Single chain summary
            summary = analytics.get_chain_performance_summary(chain_id, hours)
            if not summary:
                error(f"No data available for chain {chain_id}")
                raise click.Abort()
            # Format summary for display
            summary_data = [
                {"Metric": "Chain ID", "Value": summary["chain_id"]},
                {"Metric": "Time Range", "Value": f"{summary['time_range_hours']} hours"},
                {"Metric": "Data Points", "Value": summary["data_points"]},
                {"Metric": "Health Score", "Value": f"{summary['health_score']:.1f}/100"},
                {"Metric": "Active Alerts", "Value": summary["active_alerts"]},
                {"Metric": "Avg TPS", "Value": f"{summary['statistics']['tps']['avg']:.2f}"},
                {"Metric": "Avg Block Time", "Value": f"{summary['statistics']['block_time']['avg']:.2f}s"},
                {"Metric": "Avg Gas Price", "Value": f"{summary['statistics']['gas_price']['avg']:,} wei"}
            ]
            output(summary_data, ctx.obj.get('output_format', format), title=f"Chain Summary: {chain_id}")
        else:
            # Cross-chain analysis
            analysis = analytics.get_cross_chain_analysis()
            if not analysis:
                error("No analytics data available")
                raise click.Abort()
            # Overview data
            overview_data = [
                {"Metric": "Total Chains", "Value": analysis["total_chains"]},
                {"Metric": "Active Chains", "Value": analysis["active_chains"]},
                {"Metric": "Total Alerts", "Value": analysis["alerts_summary"]["total_alerts"]},
                {"Metric": "Critical Alerts", "Value": analysis["alerts_summary"]["critical_alerts"]},
                {"Metric": "Total Memory Usage", "Value": f"{analysis['resource_usage']['total_memory_mb']:.1f}MB"},
                {"Metric": "Total Disk Usage", "Value": f"{analysis['resource_usage']['total_disk_mb']:.1f}MB"},
                {"Metric": "Total Clients", "Value": analysis["resource_usage"]["total_clients"]},
                {"Metric": "Total Agents", "Value": analysis["resource_usage"]["total_agents"]}
            ]
            output(overview_data, ctx.obj.get('output_format', format), title="Cross-Chain Analysis Overview")
            # Performance comparison (section skipped when empty).
            if analysis["performance_comparison"]:
                # NOTE: the comprehension variable below reuses the (falsy in
                # this branch) `chain_id` parameter name.
                comparison_data = [
                    {
                        "Chain ID": chain_id,
                        "TPS": f"{data['tps']:.2f}",
                        "Block Time": f"{data['block_time']:.2f}s",
                        "Health Score": f"{data['health_score']:.1f}/100"
                    }
                    for chain_id, data in analysis["performance_comparison"].items()
                ]
                output(comparison_data, ctx.obj.get('output_format', format), title="Chain Performance Comparison")
    except Exception as e:
        error(f"Error getting analytics summary: {str(e)}")
        raise click.Abort()
@analytics.command()
@click.option('--realtime', is_flag=True, help='Real-time monitoring')
@click.option('--interval', default=30, help='Update interval in seconds')
@click.option('--chain-id', help='Monitor specific chain')
@click.pass_context
def monitor(ctx, realtime, interval, chain_id):
    """Monitor chain performance in real-time"""
    # With --realtime, renders a live rich table (one chain with --chain-id,
    # otherwise all chains) until Ctrl-C; otherwise prints one snapshot.
    try:
        config = load_multichain_config()
        # NOTE: shadows the module-level `analytics` group inside this body.
        analytics = ChainAnalytics(config)
        if realtime:
            # Real-time monitoring
            # Imported lazily so rich is only required for the live view.
            from rich.console import Console
            from rich.live import Live
            from rich.table import Table
            import time
            console = Console()
            def generate_monitor_table():
                # Build one refreshed table from freshly collected metrics.
                try:
                    # Collect latest metrics
                    asyncio.run(analytics.collect_all_metrics())
                    table = Table(title=f"Chain Monitor - {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
                    table.add_column("Chain ID", style="cyan")
                    table.add_column("TPS", style="green")
                    table.add_column("Block Time", style="yellow")
                    table.add_column("Health", style="red")
                    table.add_column("Alerts", style="magenta")
                    if chain_id:
                        # Single chain monitoring
                        summary = analytics.get_chain_performance_summary(chain_id, 1)
                        if summary:
                            # Health colour thresholds: >70 green, >40 yellow, else red.
                            health_color = "green" if summary["health_score"] > 70 else "yellow" if summary["health_score"] > 40 else "red"
                            table.add_row(
                                chain_id,
                                f"{summary['statistics']['tps']['avg']:.2f}",
                                f"{summary['statistics']['block_time']['avg']:.2f}s",
                                f"[{health_color}]{summary['health_score']:.1f}[/{health_color}]",
                                str(summary["active_alerts"])
                            )
                    else:
                        # All chains monitoring.
                        # BUG FIX: this loop previously reused the name
                        # `chain_id`, which made `chain_id` local to this
                        # nested function and caused an UnboundLocalError on
                        # the `if chain_id:` check above (so realtime mode
                        # only ever rendered the error string). Use a
                        # distinct loop variable.
                        analysis = analytics.get_cross_chain_analysis()
                        for cid, data in analysis["performance_comparison"].items():
                            health_color = "green" if data["health_score"] > 70 else "yellow" if data["health_score"] > 40 else "red"
                            table.add_row(
                                cid,
                                f"{data['tps']:.2f}",
                                f"{data['block_time']:.2f}s",
                                f"[{health_color}]{data['health_score']:.1f}[/{health_color}]",
                                str(len([a for a in analytics.alerts if a.chain_id == cid]))
                            )
                    return table
                except Exception as e:
                    # Render the error text in place of the table on failure.
                    return f"Error collecting metrics: {e}"
            with Live(generate_monitor_table(), refresh_per_second=1) as live:
                try:
                    while True:
                        live.update(generate_monitor_table())
                        time.sleep(interval)
                except KeyboardInterrupt:
                    console.print("\n[yellow]Monitoring stopped by user[/yellow]")
        else:
            # Single snapshot
            asyncio.run(analytics.collect_all_metrics())
            if chain_id:
                summary = analytics.get_chain_performance_summary(chain_id, 1)
                if not summary:
                    error(f"No data available for chain {chain_id}")
                    raise click.Abort()
                monitor_data = [
                    {"Metric": "Chain ID", "Value": summary["chain_id"]},
                    {"Metric": "Current TPS", "Value": f"{summary['statistics']['tps']['avg']:.2f}"},
                    {"Metric": "Current Block Time", "Value": f"{summary['statistics']['block_time']['avg']:.2f}s"},
                    {"Metric": "Health Score", "Value": f"{summary['health_score']:.1f}/100"},
                    {"Metric": "Active Alerts", "Value": summary["active_alerts"]},
                    {"Metric": "Memory Usage", "Value": f"{summary['latest_metrics']['memory_usage_mb']:.1f}MB"},
                    {"Metric": "Disk Usage", "Value": f"{summary['latest_metrics']['disk_usage_mb']:.1f}MB"},
                    {"Metric": "Active Nodes", "Value": summary["latest_metrics"]["active_nodes"]},
                    {"Metric": "Client Count", "Value": summary["latest_metrics"]["client_count"]},
                    {"Metric": "Agent Count", "Value": summary["latest_metrics"]["agent_count"]}
                ]
                output(monitor_data, ctx.obj.get('output_format', 'table'), title=f"Chain Monitor: {chain_id}")
            else:
                analysis = analytics.get_cross_chain_analysis()
                monitor_data = [
                    {"Metric": "Total Chains", "Value": analysis["total_chains"]},
                    {"Metric": "Active Chains", "Value": analysis["active_chains"]},
                    {"Metric": "Total Memory Usage", "Value": f"{analysis['resource_usage']['total_memory_mb']:.1f}MB"},
                    {"Metric": "Total Disk Usage", "Value": f"{analysis['resource_usage']['total_disk_mb']:.1f}MB"},
                    {"Metric": "Total Clients", "Value": analysis["resource_usage"]["total_clients"]},
                    {"Metric": "Total Agents", "Value": analysis["resource_usage"]["total_agents"]},
                    {"Metric": "Total Alerts", "Value": analysis["alerts_summary"]["total_alerts"]},
                    {"Metric": "Critical Alerts", "Value": analysis["alerts_summary"]["critical_alerts"]}
                ]
                output(monitor_data, ctx.obj.get('output_format', 'table'), title="System Monitor")
    except click.Abort:
        # Propagate the deliberate abort unchanged.
        raise
    except Exception as e:
        error(f"Error during monitoring: {str(e)}")
        raise click.Abort()
@analytics.command()
@click.option('--chain-id', help='Specific chain ID for predictions')
@click.option('--hours', default=24, help='Prediction time horizon in hours')
@click.option('--format', type=click.Choice(['table', 'json']), default='table', help='Output format')
@click.pass_context
def predict(ctx, chain_id, hours, format):
    """Predict chain performance"""
    try:
        config = load_multichain_config()
        engine = ChainAnalytics(config)
        # Refresh metrics first so predictions are based on current data.
        asyncio.run(engine.collect_all_metrics())
        out_format = ctx.obj.get('output_format', format)
        if chain_id:
            # Predictions for one explicitly named chain.
            predictions = asyncio.run(engine.predict_chain_performance(chain_id, hours))
            if not predictions:
                error(f"No prediction data available for chain {chain_id}")
                raise click.Abort()
            rows = []
            for pred in predictions:
                rows.append({
                    "Metric": pred.metric,
                    "Predicted Value": f"{pred.predicted_value:.2f}",
                    "Confidence": f"{pred.confidence:.1%}",
                    "Time Horizon": f"{pred.time_horizon_hours}h"
                })
            output(rows, out_format, title=f"Performance Predictions: {chain_id}")
            return
        # No chain given: predict every chain seen in the cross-chain analysis.
        analysis = engine.get_cross_chain_analysis()
        per_chain = {}
        for cid in analysis["performance_comparison"].keys():
            chain_preds = asyncio.run(engine.predict_chain_performance(cid, hours))
            if chain_preds:
                per_chain[cid] = chain_preds
        if not per_chain:
            error("No prediction data available")
            raise click.Abort()
        # Flatten the per-chain predictions into display rows.
        rows = [
            {
                "Chain ID": cid,
                "Metric": pred.metric,
                "Predicted Value": f"{pred.predicted_value:.2f}",
                "Confidence": f"{pred.confidence:.1%}",
                "Time Horizon": f"{pred.time_horizon_hours}h"
            }
            for cid, chain_preds in per_chain.items()
            for pred in chain_preds
        ]
        output(rows, out_format, title="Chain Performance Predictions")
    except Exception as e:
        error(f"Error generating predictions: {str(e)}")
        raise click.Abort()
@analytics.command()
@click.option('--chain-id', help='Specific chain ID for recommendations')
@click.option('--format', type=click.Choice(['table', 'json']), default='table', help='Output format')
@click.pass_context
def optimize(ctx, chain_id, format):
    """Get optimization recommendations"""
    try:
        config = load_multichain_config()
        engine = ChainAnalytics(config)
        # Refresh metrics first so recommendations reflect current state.
        asyncio.run(engine.collect_all_metrics())
        out_format = ctx.obj.get('output_format', format)
        if chain_id:
            # Recommendations for one explicitly named chain.
            recs = engine.get_optimization_recommendations(chain_id)
            if not recs:
                success(f"No optimization recommendations for chain {chain_id}")
                return
            rows = []
            for rec in recs:
                rows.append({
                    "Type": rec["type"],
                    "Priority": rec["priority"],
                    "Issue": rec["issue"],
                    "Current Value": rec["current_value"],
                    "Recommended Action": rec["recommended_action"],
                    "Expected Improvement": rec["expected_improvement"]
                })
            output(rows, out_format, title=f"Optimization Recommendations: {chain_id}")
            return
        # No chain given: gather recommendations for every known chain.
        analysis = engine.get_cross_chain_analysis()
        per_chain = {}
        for cid in analysis["performance_comparison"].keys():
            recs = engine.get_optimization_recommendations(cid)
            if recs:
                per_chain[cid] = recs
        if not per_chain:
            success("No optimization recommendations available")
            return
        # Flatten into display rows (no "Expected Improvement" in this view).
        rows = [
            {
                "Chain ID": cid,
                "Type": rec["type"],
                "Priority": rec["priority"],
                "Issue": rec["issue"],
                "Current Value": rec["current_value"],
                "Recommended Action": rec["recommended_action"]
            }
            for cid, recs in per_chain.items()
            for rec in recs
        ]
        output(rows, out_format, title="Chain Optimization Recommendations")
    except Exception as e:
        error(f"Error getting optimization recommendations: {str(e)}")
        raise click.Abort()
@analytics.command()
@click.option('--severity', type=click.Choice(['all', 'critical', 'warning']), default='all', help='Alert severity filter')
@click.option('--hours', default=24, help='Time range in hours')
@click.option('--format', type=click.Choice(['table', 'json']), default='table', help='Output format')
@click.pass_context
def alerts(ctx, severity, hours, format):
    """View performance alerts"""
    try:
        config = load_multichain_config()
        engine = ChainAnalytics(config)
        # Refresh metrics so the alert list reflects the current state.
        asyncio.run(engine.collect_all_metrics())
        # Keep only alerts newer than the requested time window.
        cutoff_time = datetime.now() - timedelta(hours=hours)
        selected = [a for a in engine.alerts if a.timestamp >= cutoff_time]
        if severity != 'all':
            selected = [a for a in selected if a.severity == severity]
        if not selected:
            success("No alerts found")
            return
        rows = []
        for alert in selected:
            rows.append({
                "Chain ID": alert.chain_id,
                "Type": alert.alert_type,
                "Severity": alert.severity,
                "Message": alert.message,
                "Current Value": f"{alert.current_value:.2f}",
                "Threshold": f"{alert.threshold:.2f}",
                "Time": alert.timestamp.strftime("%Y-%m-%d %H:%M:%S")
            })
        output(rows, ctx.obj.get('output_format', format), title=f"Performance Alerts (Last {hours}h)")
    except Exception as e:
        error(f"Error getting alerts: {str(e)}")
        raise click.Abort()
@analytics.command()
@click.option('--format', type=click.Choice(['json']), default='json', help='Output format')
@click.pass_context
def dashboard(ctx, format):
    """Get complete dashboard data"""
    try:
        config = load_multichain_config()
        engine = ChainAnalytics(config)
        # Refresh metrics before assembling the dashboard snapshot.
        asyncio.run(engine.collect_all_metrics())
        dashboard_data = engine.get_dashboard_data()
        if format != 'json':
            # Unreachable through the CLI (the only Choice is 'json');
            # kept as a defensive guard.
            error("Dashboard data only available in JSON format")
            raise click.Abort()
        import json
        click.echo(json.dumps(dashboard_data, indent=2, default=str))
    except Exception as e:
        error(f"Error getting dashboard data: {str(e)}")
        raise click.Abort()

View File

@@ -2,6 +2,18 @@
import click
import httpx
def _get_node_endpoint(ctx):
try:
from ..core.config import load_multichain_config
config = load_multichain_config()
if not config.nodes:
return "http://127.0.0.1:8082"
# Return the first node's endpoint
return list(config.nodes.values())[0].endpoint
except:
return "http://127.0.0.1:8082"
from typing import Optional, List
from ..utils import output, error
@@ -27,7 +39,7 @@ def blocks(ctx, limit: int, from_height: Optional[int]):
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/explorer/blocks",
f"{config.coordinator_url}/explorer/blocks",
params=params,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -51,7 +63,7 @@ def block(ctx, block_hash: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/explorer/blocks/{block_hash}",
f"{config.coordinator_url}/explorer/blocks/{block_hash}",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -74,7 +86,7 @@ def transaction(ctx, tx_hash: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/explorer/transactions/{tx_hash}",
f"{config.coordinator_url}/explorer/transactions/{tx_hash}",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -108,8 +120,10 @@ def status(ctx, node: int):
try:
with httpx.Client() as client:
# First get health for general status
health_url = rpc_url.replace("/rpc", "") + "/health" if "/rpc" in rpc_url else rpc_url + "/health"
response = client.get(
f"{rpc_url}/head",
health_url,
timeout=5
)
@@ -135,7 +149,7 @@ def sync_status(ctx):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/blockchain/sync",
f"{config.coordinator_url}/health",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -157,7 +171,7 @@ def peers(ctx):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/blockchain/peers",
f"{config.coordinator_url}/health",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -179,7 +193,7 @@ def info(ctx):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/blockchain/info",
f"{config.coordinator_url}/health",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -201,7 +215,7 @@ def supply(ctx):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/blockchain/supply",
f"{config.coordinator_url}/health",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -223,7 +237,7 @@ def validators(ctx):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/blockchain/validators",
f"{config.coordinator_url}/health",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -234,3 +248,148 @@ def validators(ctx):
error(f"Failed to get validators: {response.status_code}")
except Exception as e:
error(f"Network error: {e}")
@blockchain.command()
@click.option('--chain-id', required=True, help='Chain ID')
@click.pass_context
def genesis(ctx, chain_id):
    """Get the genesis block of a chain"""
    # Dropped unused `config = ctx.obj['config']` (could KeyError outside the
    # try) and the redundant function-level `import httpx` (imported at module
    # top already).
    try:
        with httpx.Client() as client:
            # Block height 0 is the genesis block on every chain.
            response = client.get(
                f"{_get_node_endpoint(ctx)}/rpc/blocks/0?chain_id={chain_id}",
                timeout=5
            )
            if response.status_code == 200:
                output(response.json(), ctx.obj['output_format'])
            else:
                error(f"Failed to get genesis block: {response.status_code} - {response.text}")
    except Exception as e:
        error(f"Network error: {e}")
@blockchain.command()
@click.option('--chain-id', required=True, help='Chain ID')
@click.pass_context
def transactions(ctx, chain_id):
    """Get latest transactions on a chain"""
    # Dropped unused `config` local and redundant function-level `import httpx`
    # (imported at module top already).
    try:
        with httpx.Client() as client:
            response = client.get(
                f"{_get_node_endpoint(ctx)}/rpc/transactions?chain_id={chain_id}",
                timeout=5
            )
            if response.status_code == 200:
                output(response.json(), ctx.obj['output_format'])
            else:
                error(f"Failed to get transactions: {response.status_code} - {response.text}")
    except Exception as e:
        error(f"Network error: {e}")
@blockchain.command()
@click.option('--chain-id', required=True, help='Chain ID')
@click.pass_context
def head(ctx, chain_id):
    """Get the head block of a chain"""
    # Dropped unused `config` local and redundant function-level `import httpx`
    # (imported at module top already).
    try:
        with httpx.Client() as client:
            response = client.get(
                f"{_get_node_endpoint(ctx)}/rpc/head?chain_id={chain_id}",
                timeout=5
            )
            if response.status_code == 200:
                output(response.json(), ctx.obj['output_format'])
            else:
                error(f"Failed to get head block: {response.status_code} - {response.text}")
    except Exception as e:
        error(f"Network error: {e}")
@blockchain.command()
@click.option('--chain-id', required=True, help='Chain ID')
@click.option('--from', 'from_addr', required=True, help='Sender address')
@click.option('--to', required=True, help='Recipient address')
@click.option('--data', required=True, help='Transaction data payload')
@click.option('--nonce', type=int, default=0, help='Nonce')
@click.pass_context
def send(ctx, chain_id, from_addr, to, data, nonce):
    """Send a transaction to a chain"""
    # Dropped unused `config` local and redundant function-level `import httpx`
    # (imported at module top already).
    try:
        with httpx.Client() as client:
            # NOTE(review): the signature is a hard-coded placeholder; real
            # transaction signing is not implemented in this CLI path.
            tx_payload = {
                "type": "TRANSFER",
                "chain_id": chain_id,
                "from_address": from_addr,
                "to_address": to,
                "value": 0,
                "gas_limit": 100000,
                "gas_price": 1,
                "nonce": nonce,
                "data": data,
                "signature": "mock_signature"
            }
            response = client.post(
                f"{_get_node_endpoint(ctx)}/rpc/sendTx",
                json=tx_payload,
                timeout=5
            )
            # Node may answer 200 or 201 on acceptance.
            if response.status_code in (200, 201):
                output(response.json(), ctx.obj['output_format'])
            else:
                error(f"Failed to send transaction: {response.status_code} - {response.text}")
    except Exception as e:
        error(f"Network error: {e}")
@blockchain.command()
@click.option('--address', required=True, help='Wallet address')
@click.pass_context
def balance(ctx, address):
    """Get the balance of an address across all chains"""
    # Dropped unused `config` local and redundant function-level `import httpx`
    # (imported at module top already).
    try:
        # The node RPC requires a chain_id; only the devnet chain is queried
        # for now, despite the "across all chains" docstring.
        with httpx.Client() as client:
            response = client.get(
                f"{_get_node_endpoint(ctx)}/rpc/getBalance/{address}?chain_id=ait-devnet",
                timeout=5
            )
            if response.status_code == 200:
                output(response.json(), ctx.obj['output_format'])
            else:
                error(f"Failed to get balance: {response.status_code} - {response.text}")
    except Exception as e:
        error(f"Network error: {e}")
@blockchain.command()
@click.option('--address', required=True, help='Wallet address')
@click.option('--amount', type=int, default=1000, help='Amount to mint')
@click.pass_context
def faucet(ctx, address, amount):
    """Mint devnet funds to an address"""
    # Dropped unused `config` local and redundant function-level `import httpx`
    # (imported at module top already).
    try:
        with httpx.Client() as client:
            # Faucet is an admin endpoint and is pinned to the devnet chain.
            response = client.post(
                f"{_get_node_endpoint(ctx)}/rpc/admin/mintFaucet",
                json={"address": address, "amount": amount, "chain_id": "ait-devnet"},
                timeout=5
            )
            if response.status_code in (200, 201):
                output(response.json(), ctx.obj['output_format'])
            else:
                error(f"Failed to use faucet: {response.status_code} - {response.text}")
    except Exception as e:
        error(f"Network error: {e}")

View File

@@ -0,0 +1,491 @@
"""Chain management commands for AITBC CLI"""
import click
from typing import Optional
from ..core.chain_manager import ChainManager, ChainNotFoundError, NodeNotAvailableError
from ..core.config import MultiChainConfig, load_multichain_config
from ..models.chain import ChainType
from ..utils import output, error, success
@click.group()
def chain():
    """Multi-chain management commands"""
    # Root Click group; the subcommands below attach via @chain.command().
    pass
# The CLI command name stays `list`; the Python function is renamed so it no
# longer shadows the builtin `list`.
@chain.command(name='list')
@click.option('--type', 'chain_type', type=click.Choice(['main', 'topic', 'private', 'all']),
              default='all', help='Filter by chain type')
@click.option('--show-private', is_flag=True, help='Show private chains')
@click.option('--sort', type=click.Choice(['id', 'size', 'nodes', 'created']),
              default='id', help='Sort by field')
@click.pass_context
def list_chains(ctx, chain_type, show_private, sort):
    """List all available chains"""
    try:
        config = load_multichain_config()
        chain_manager = ChainManager(config)
        # list_chains is async; run it to completion here.
        import asyncio
        chains = asyncio.run(chain_manager.list_chains(
            chain_type=ChainType(chain_type) if chain_type != 'all' else None,
            include_private=show_private,
            sort_by=sort
        ))
        if not chains:
            output("No chains found", ctx.obj.get('output_format', 'table'))
            return
        # One display row per chain.
        chains_data = [
            {
                "Chain ID": chain.id,
                "Type": chain.type.value,
                "Purpose": chain.purpose,
                "Name": chain.name,
                "Size": f"{chain.size_mb:.1f}MB",
                "Nodes": chain.node_count,
                "Contracts": chain.contract_count,
                "Clients": chain.client_count,
                "Miners": chain.miner_count,
                "Status": chain.status.value
            }
            for chain in chains
        ]
        output(chains_data, ctx.obj.get('output_format', 'table'), title="AITBC Chains")
    except Exception as e:
        error(f"Error listing chains: {str(e)}")
        raise click.Abort()
@chain.command()
@click.argument('chain_id')
@click.option('--detailed', is_flag=True, help='Show detailed information')
@click.option('--metrics', is_flag=True, help='Show performance metrics')
@click.pass_context
def info(ctx, chain_id, detailed, metrics):
    """Get detailed information about a chain"""
    try:
        config = load_multichain_config()
        manager = ChainManager(config)
        import asyncio
        details = asyncio.run(manager.get_chain_info(chain_id, detailed, metrics))
        fmt = ctx.obj.get('output_format', 'table')
        # Basic section is always shown.
        output({
            "Chain ID": details.id,
            "Type": details.type.value,
            "Purpose": details.purpose,
            "Name": details.name,
            "Description": details.description or "No description",
            "Status": details.status.value,
            "Created": details.created_at.strftime("%Y-%m-%d %H:%M:%S"),
            "Block Height": details.block_height,
            "Size": f"{details.size_mb:.1f}MB"
        }, fmt, title=f"Chain Information: {chain_id}")
        if detailed:
            # Network topology and participation details.
            output({
                "Total Nodes": details.node_count,
                "Active Nodes": details.active_nodes,
                "Consensus": details.consensus_algorithm.value,
                "Block Time": f"{details.block_time}s",
                "Clients": details.client_count,
                "Miners": details.miner_count,
                "Contracts": details.contract_count,
                "Agents": details.agent_count,
                "Privacy": details.privacy.visibility,
                "Access Control": details.privacy.access_control
            }, fmt, title="Network Details")
        if metrics:
            # Runtime performance section.
            output({
                "TPS": f"{details.tps:.1f}",
                "Avg Block Time": f"{details.avg_block_time:.1f}s",
                "Avg Gas Used": f"{details.avg_gas_used:,}",
                "Gas Price": f"{details.gas_price / 1e9:.1f} gwei",
                "Growth Rate": f"{details.growth_rate_mb_per_day:.1f}MB/day",
                "Memory Usage": f"{details.memory_usage_mb:.1f}MB",
                "Disk Usage": f"{details.disk_usage_mb:.1f}MB"
            }, fmt, title="Performance Metrics")
    except ChainNotFoundError:
        error(f"Chain {chain_id} not found")
        raise click.Abort()
    except Exception as e:
        error(f"Error getting chain info: {str(e)}")
        raise click.Abort()
@chain.command()
@click.argument('config_file', type=click.Path(exists=True))
@click.option('--node', help='Target node for chain creation')
@click.option('--dry-run', is_flag=True, help='Show what would be created without actually creating')
@click.pass_context
def create(ctx, config_file, node, dry_run):
    """Create a new chain from configuration file"""
    try:
        import yaml
        from ..models.chain import ChainConfig
        config = load_multichain_config()
        manager = ChainManager(config)
        # Load the chain spec from the YAML file and validate it via the model.
        with open(config_file, 'r') as f:
            config_data = yaml.safe_load(f)
        chain_config = ChainConfig(**config_data['chain'])
        fmt = ctx.obj.get('output_format', 'table')
        if dry_run:
            # Preview only: report what would be created, then stop.
            output({
                "Chain Type": chain_config.type.value,
                "Purpose": chain_config.purpose,
                "Name": chain_config.name,
                "Description": chain_config.description or "No description",
                "Consensus": chain_config.consensus.algorithm.value,
                "Privacy": chain_config.privacy.visibility,
                "Target Node": node or "Auto-selected"
            }, fmt, title="Dry Run - Chain Creation")
            return
        chain_id = manager.create_chain(chain_config, node)
        success("Chain created successfully!")
        output({
            "Chain ID": chain_id,
            "Type": chain_config.type.value,
            "Purpose": chain_config.purpose,
            "Name": chain_config.name,
            "Node": node or "Auto-selected"
        }, fmt)
        if chain_config.privacy.visibility == "private":
            success("Private chain created! Use access codes to invite participants.")
    except Exception as e:
        error(f"Error creating chain: {str(e)}")
        raise click.Abort()
@chain.command()
@click.argument('chain_id')
@click.option('--force', is_flag=True, help='Force deletion without confirmation')
@click.option('--confirm', is_flag=True, help='Confirm deletion')
@click.pass_context
def delete(ctx, chain_id, force, confirm):
    """Delete a chain permanently"""
    try:
        config = load_multichain_config()
        chain_manager = ChainManager(config)
        # Fetch chain details so the user sees what is about to be removed.
        # NOTE(review): other commands wrap get_chain_info in asyncio.run —
        # confirm a synchronous variant exists here.
        chain_info = chain_manager.get_chain_info(chain_id, detailed=True)
        if not force:
            # Show warning and require explicit --confirm.
            warning_info = {
                "Chain ID": chain_id,
                "Type": chain_info.type.value,
                "Purpose": chain_info.purpose,
                "Name": chain_info.name,
                "Status": chain_info.status.value,
                "Participants": chain_info.client_count,
                "Transactions": "Multiple"  # Would get actual count
            }
            output(warning_info, ctx.obj.get('output_format', 'table'), title="Chain Deletion Warning")
            if not confirm:
                error("To confirm deletion, use --confirm flag")
                raise click.Abort()
        # BUG FIX: the result used to be stored in a local named `success`,
        # shadowing the imported success() helper — calling success(...) on
        # the happy path then raised TypeError.
        deleted = chain_manager.delete_chain(chain_id, force)
        if deleted:
            success(f"Chain {chain_id} deleted successfully!")
        else:
            error(f"Failed to delete chain {chain_id}")
            raise click.Abort()
    except click.Abort:
        # Let deliberate aborts propagate instead of being re-reported below.
        raise
    except ChainNotFoundError:
        error(f"Chain {chain_id} not found")
        raise click.Abort()
    except Exception as e:
        error(f"Error deleting chain: {str(e)}")
        raise click.Abort()
@chain.command()
@click.argument('chain_id')
@click.argument('node_id')
@click.pass_context
def add(ctx, chain_id, node_id):
    """Add a chain to a specific node"""
    try:
        config = load_multichain_config()
        chain_manager = ChainManager(config)
        # BUG FIX: previously assigned to a local named `success`, shadowing
        # the imported success() helper and crashing with TypeError on the
        # happy path.
        added = chain_manager.add_chain_to_node(chain_id, node_id)
        if added:
            success(f"Chain {chain_id} added to node {node_id} successfully!")
        else:
            error(f"Failed to add chain {chain_id} to node {node_id}")
            raise click.Abort()
    except click.Abort:
        # Let deliberate aborts propagate instead of being re-reported below.
        raise
    except Exception as e:
        error(f"Error adding chain to node: {str(e)}")
        raise click.Abort()
@chain.command()
@click.argument('chain_id')
@click.argument('node_id')
@click.option('--migrate', is_flag=True, help='Migrate to another node before removal')
@click.pass_context
def remove(ctx, chain_id, node_id, migrate):
    """Remove a chain from a specific node"""
    try:
        config = load_multichain_config()
        chain_manager = ChainManager(config)
        # BUG FIX: previously assigned to a local named `success`, shadowing
        # the imported success() helper and crashing with TypeError on the
        # happy path.
        removed = chain_manager.remove_chain_from_node(chain_id, node_id, migrate)
        if removed:
            success(f"Chain {chain_id} removed from node {node_id} successfully!")
        else:
            error(f"Failed to remove chain {chain_id} from node {node_id}")
            raise click.Abort()
    except click.Abort:
        # Let deliberate aborts propagate instead of being re-reported below.
        raise
    except Exception as e:
        error(f"Error removing chain from node: {str(e)}")
        raise click.Abort()
@chain.command()
@click.argument('chain_id')
@click.argument('from_node')
@click.argument('to_node')
@click.option('--dry-run', is_flag=True, help='Show migration plan without executing')
@click.option('--verify', is_flag=True, help='Verify migration after completion')
@click.pass_context
def migrate(ctx, chain_id, from_node, to_node, dry_run, verify):
    """Migrate a chain between nodes"""
    try:
        config = load_multichain_config()
        manager = ChainManager(config)
        result = manager.migrate_chain(chain_id, from_node, to_node, dry_run)
        fmt = ctx.obj.get('output_format', 'table')
        if dry_run:
            # Feasibility report only; nothing was moved.
            output({
                "Chain ID": chain_id,
                "Source Node": from_node,
                "Target Node": to_node,
                "Feasible": "Yes" if result.success else "No",
                "Estimated Time": f"{result.transfer_time_seconds}s",
                "Error": result.error or "None"
            }, fmt, title="Migration Plan")
            return
        if not result.success:
            error(f"Migration failed: {result.error}")
            raise click.Abort()
        success("Chain migration completed successfully!")
        output({
            "Chain ID": chain_id,
            "Source Node": from_node,
            "Target Node": to_node,
            "Blocks Transferred": result.blocks_transferred,
            "Transfer Time": f"{result.transfer_time_seconds}s",
            "Verification": "Passed" if result.verification_passed else "Failed"
        }, fmt)
    except Exception as e:
        error(f"Error during migration: {str(e)}")
        raise click.Abort()
@chain.command()
@click.argument('chain_id')
@click.option('--path', help='Backup directory path')
@click.option('--compress', is_flag=True, help='Compress backup')
@click.option('--verify', is_flag=True, help='Verify backup integrity')
@click.pass_context
def backup(ctx, chain_id, path, compress, verify):
    """Backup chain data"""
    try:
        config = load_multichain_config()
        manager = ChainManager(config)
        res = manager.backup_chain(chain_id, path, compress, verify)
        success("Chain backup completed successfully!")
        summary = {
            "Chain ID": chain_id,
            "Backup File": res.backup_file,
            "Original Size": f"{res.original_size_mb:.1f}MB",
            "Backup Size": f"{res.backup_size_mb:.1f}MB",
            # Compression ratio is only meaningful with --compress.
            "Compression": f"{res.compression_ratio:.1f}x" if compress else "None",
            "Checksum": res.checksum,
            "Verification": "Passed" if res.verification_passed else "Failed"
        }
        output(summary, ctx.obj.get('output_format', 'table'))
    except Exception as e:
        error(f"Error during backup: {str(e)}")
        raise click.Abort()
@chain.command()
@click.argument('backup_file', type=click.Path(exists=True))
@click.option('--node', help='Target node for restoration')
@click.option('--verify', is_flag=True, help='Verify restoration')
@click.pass_context
def restore(ctx, backup_file, node, verify):
    """Restore chain from backup"""
    try:
        config = load_multichain_config()
        manager = ChainManager(config)
        res = manager.restore_chain(backup_file, node, verify)
        success("Chain restoration completed successfully!")
        summary = {
            "Chain ID": res.chain_id,
            "Node": res.node_id,
            "Blocks Restored": res.blocks_restored,
            "Verification": "Passed" if res.verification_passed else "Failed"
        }
        output(summary, ctx.obj.get('output_format', 'table'))
    except Exception as e:
        error(f"Error during restoration: {str(e)}")
        raise click.Abort()
@chain.command()
@click.argument('chain_id')
@click.option('--realtime', is_flag=True, help='Real-time monitoring')
@click.option('--export', help='Export monitoring data to file')
@click.option('--interval', default=5, help='Update interval in seconds')
@click.pass_context
def monitor(ctx, chain_id, realtime, export, interval):
    """Monitor chain activity

    With --realtime, renders a live-updating Rich layout until Ctrl-C;
    otherwise prints a single statistics snapshot and optionally exports
    it as JSON via --export.
    """
    try:
        config = load_multichain_config()
        chain_manager = ChainManager(config)
        # NOTE(review): get_chain_info is wrapped in asyncio.run elsewhere
        # (e.g. the `info` command); here it is called synchronously in both
        # branches — confirm a sync variant exists.
        if realtime:
            # Real-time monitoring (placeholder implementation)
            from rich.console import Console
            from rich.layout import Layout
            from rich.live import Live
            import time
            console = Console()
            # Builds one frame of the live display; returns a plain error
            # string (which Rich can also render) if the fetch fails.
            def generate_monitor_layout():
                try:
                    chain_info = chain_manager.get_chain_info(chain_id, detailed=True, metrics=True)
                    layout = Layout()
                    layout.split_column(
                        Layout(name="header", size=3),
                        Layout(name="stats"),
                        Layout(name="activity", size=10)
                    )
                    # Header
                    layout["header"].update(
                        f"Chain Monitor: {chain_id} - {chain_info.status.value.upper()}"
                    )
                    # Stats table
                    stats_data = [
                        ["Block Height", str(chain_info.block_height)],
                        ["TPS", f"{chain_info.tps:.1f}"],
                        ["Active Nodes", str(chain_info.active_nodes)],
                        ["Gas Price", f"{chain_info.gas_price / 1e9:.1f} gwei"],
                        ["Memory Usage", f"{chain_info.memory_usage_mb:.1f}MB"],
                        ["Disk Usage", f"{chain_info.disk_usage_mb:.1f}MB"]
                    ]
                    # str() of the nested list — a crude placeholder render.
                    layout["stats"].update(str(stats_data))
                    # Recent activity (placeholder)
                    layout["activity"].update("Recent activity would be displayed here")
                    return layout
                except Exception as e:
                    return f"Error getting chain info: {e}"
            # Refresh the layout every `interval` seconds until Ctrl-C.
            with Live(generate_monitor_layout(), refresh_per_second=1) as live:
                try:
                    while True:
                        live.update(generate_monitor_layout())
                        time.sleep(interval)
                except KeyboardInterrupt:
                    console.print("\n[yellow]Monitoring stopped by user[/yellow]")
        else:
            # Single snapshot
            chain_info = chain_manager.get_chain_info(chain_id, detailed=True, metrics=True)
            stats_data = [
                {
                    "Metric": "Block Height",
                    "Value": str(chain_info.block_height)
                },
                {
                    "Metric": "TPS",
                    "Value": f"{chain_info.tps:.1f}"
                },
                {
                    "Metric": "Active Nodes",
                    "Value": str(chain_info.active_nodes)
                },
                {
                    "Metric": "Gas Price",
                    "Value": f"{chain_info.gas_price / 1e9:.1f} gwei"
                },
                {
                    "Metric": "Memory Usage",
                    "Value": f"{chain_info.memory_usage_mb:.1f}MB"
                },
                {
                    "Metric": "Disk Usage",
                    "Value": f"{chain_info.disk_usage_mb:.1f}MB"
                }
            ]
            output(stats_data, ctx.obj.get('output_format', 'table'), title=f"Chain Statistics: {chain_id}")
            if export:
                # Dump the full chain_info model as JSON to the given path.
                import json
                with open(export, 'w') as f:
                    json.dump(chain_info.dict(), f, indent=2, default=str)
                success(f"Statistics exported to {export}")
    except ChainNotFoundError:
        error(f"Chain {chain_id} not found")
        raise click.Abort()
    except Exception as e:
        error(f"Error during monitoring: {str(e)}")
        raise click.Abort()

View File

@@ -48,7 +48,7 @@ def submit(ctx, job_type: str, prompt: Optional[str], model: Optional[str],
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/jobs",
f"{config.coordinator_url}/jobs",
headers={
"Content-Type": "application/json",
"X-Api-Key": config.api_key or ""
@@ -98,7 +98,7 @@ def status(ctx, job_id: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/jobs/{job_id}",
f"{config.coordinator_url}/jobs/{job_id}",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -123,7 +123,7 @@ def blocks(ctx, limit: int):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/explorer/blocks",
f"{config.coordinator_url}/explorer/blocks",
params={"limit": limit},
headers={"X-Api-Key": config.api_key or ""}
)
@@ -149,7 +149,7 @@ def cancel(ctx, job_id: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/jobs/{job_id}/cancel",
f"{config.coordinator_url}/jobs/{job_id}/cancel",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -181,7 +181,7 @@ def receipts(ctx, limit: int, job_id: Optional[str], status: Optional[str]):
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/explorer/receipts",
f"{config.coordinator_url}/explorer/receipts",
params=params,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -222,7 +222,7 @@ def history(ctx, limit: int, status: Optional[str], type: Optional[str],
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/jobs/history",
f"{config.coordinator_url}/jobs",
params=params,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -283,7 +283,7 @@ def batch_submit(ctx, file_path: str, file_format: Optional[str], retries: int,
with httpx.Client() as http_client:
response = http_client.post(
f"{config.coordinator_url}/v1/jobs",
f"{config.coordinator_url}/jobs",
headers={
"Content-Type": "application/json",
"X-Api-Key": config.api_key or ""
@@ -387,7 +387,7 @@ def pay(ctx, job_id: str, amount: float, currency: str, payment_method: str, esc
try:
with httpx.Client() as http_client:
response = http_client.post(
f"{config.coordinator_url}/v1/payments",
f"{config.coordinator_url}/payments",
headers={
"Content-Type": "application/json",
"X-Api-Key": config.api_key or ""
@@ -422,7 +422,7 @@ def payment_status(ctx, job_id: str):
try:
with httpx.Client() as http_client:
response = http_client.get(
f"{config.coordinator_url}/v1/jobs/{job_id}/payment",
f"{config.coordinator_url}/jobs/{job_id}/payment",
headers={"X-Api-Key": config.api_key or ""}
)
if response.status_code == 200:
@@ -448,7 +448,7 @@ def payment_receipt(ctx, payment_id: str):
try:
with httpx.Client() as http_client:
response = http_client.get(
f"{config.coordinator_url}/v1/payments/{payment_id}/receipt",
f"{config.coordinator_url}/payments/{payment_id}/receipt",
headers={"X-Api-Key": config.api_key or ""}
)
if response.status_code == 200:
@@ -476,7 +476,7 @@ def refund(ctx, job_id: str, payment_id: str, reason: str):
try:
with httpx.Client() as http_client:
response = http_client.post(
f"{config.coordinator_url}/v1/payments/{payment_id}/refund",
f"{config.coordinator_url}/payments/{payment_id}/refund",
headers={
"Content-Type": "application/json",
"X-Api-Key": config.api_key or ""

View File

@@ -0,0 +1,378 @@
"""Production deployment and scaling commands for AITBC CLI"""
import click
import asyncio
import json
from datetime import datetime
from typing import Optional
from ..core.deployment import (
ProductionDeployment, ScalingPolicy, DeploymentStatus
)
from ..utils import output, error, success
@click.group()
def deploy():
    """Production deployment and scaling commands"""
    # Root Click group; deployment subcommands attach via @deploy.command().
    pass
@deploy.command()
@click.argument('name')
@click.argument('environment')
@click.argument('region')
@click.argument('instance_type')
@click.argument('min_instances', type=int)
@click.argument('max_instances', type=int)
@click.argument('desired_instances', type=int)
@click.argument('port', type=int)
@click.argument('domain')
@click.option('--db-host', default='localhost', help='Database host')
@click.option('--db-port', default=5432, help='Database port')
@click.option('--db-name', default='aitbc', help='Database name')
@click.pass_context
def create(ctx, name, environment, region, instance_type, min_instances, max_instances, desired_instances, port, domain, db_host, db_port, db_name):
    """Create a new deployment configuration"""
    try:
        manager = ProductionDeployment()
        # Database settings; TLS is only forced on for production.
        database_config = {
            "host": db_host,
            "port": db_port,
            "name": db_name,
            "ssl_enabled": environment == "production"
        }
        deployment_id = asyncio.run(manager.create_deployment(
            name=name,
            environment=environment,
            region=region,
            instance_type=instance_type,
            min_instances=min_instances,
            max_instances=max_instances,
            desired_instances=desired_instances,
            port=port,
            domain=domain,
            database_config=database_config
        ))
        if not deployment_id:
            error("Failed to create deployment configuration")
            raise click.Abort()
        success(f"Deployment configuration created! ID: {deployment_id}")
        output({
            "Deployment ID": deployment_id,
            "Name": name,
            "Environment": environment,
            "Region": region,
            "Instance Type": instance_type,
            "Min Instances": min_instances,
            "Max Instances": max_instances,
            "Desired Instances": desired_instances,
            "Port": port,
            "Domain": domain,
            "Status": "pending",
            "Created": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        }, ctx.obj.get('output_format', 'table'))
    except Exception as e:
        error(f"Error creating deployment: {str(e)}")
        raise click.Abort()
@deploy.command()
@click.argument('deployment_id')
@click.pass_context
def start(ctx, deployment_id):
    """Deploy the application to production.

    Runs the async deploy step for DEPLOYMENT_ID and prints a short status
    table when it succeeds; aborts the CLI on failure.
    """
    try:
        deployment = ProductionDeployment()
        deployed = asyncio.run(deployment.deploy_application(deployment_id))
        if deployed:
            success(f"Deployment {deployment_id} started successfully!")
            deployment_data = {
                "Deployment ID": deployment_id,
                "Status": "running",
                "Started": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            }
            output(deployment_data, ctx.obj.get('output_format', 'table'))
        else:
            error(f"Failed to start deployment {deployment_id}")
            raise click.Abort()
    except click.Abort:
        # Bug fix: click.Abort subclasses Exception; re-raise so the generic
        # handler below does not print a second, misleading error message.
        raise
    except Exception as e:
        error(f"Error starting deployment: {str(e)}")
        raise click.Abort()
@deploy.command()
@click.argument('deployment_id')
@click.argument('target_instances', type=int)
@click.option('--reason', default='manual', help='Scaling reason')
@click.pass_context
def scale(ctx, deployment_id, target_instances, reason):
    """Scale a deployment to target instance count.

    Delegates to ProductionDeployment.scale_deployment and prints a summary
    table on success; aborts the CLI on failure.
    """
    try:
        deployment = ProductionDeployment()
        scaled = asyncio.run(deployment.scale_deployment(deployment_id, target_instances, reason))
        if scaled:
            success(f"Deployment {deployment_id} scaled to {target_instances} instances!")
            scaling_data = {
                "Deployment ID": deployment_id,
                "Target Instances": target_instances,
                "Reason": reason,
                "Status": "completed",
                "Scaled": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            }
            output(scaling_data, ctx.obj.get('output_format', 'table'))
        else:
            error(f"Failed to scale deployment {deployment_id}")
            raise click.Abort()
    except click.Abort:
        # Bug fix: click.Abort subclasses Exception; re-raise so the generic
        # handler below does not print a second, misleading error message.
        raise
    except Exception as e:
        error(f"Error scaling deployment: {str(e)}")
        raise click.Abort()
@deploy.command()
@click.argument('deployment_id')
@click.pass_context
def status(ctx, deployment_id):
    """Get comprehensive deployment status.

    Prints up to three tables: static deployment info, current performance
    metrics (if reported), and recent scaling events (if any).
    """
    try:
        deployment = ProductionDeployment()
        status_data = asyncio.run(deployment.get_deployment_status(deployment_id))
        if not status_data:
            error(f"Deployment {deployment_id} not found")
            raise click.Abort()

        fmt = ctx.obj.get('output_format', 'table')

        # Static configuration plus health/uptime summary.
        deployment_info = status_data["deployment"]
        info_data = [
            {"Metric": "Deployment ID", "Value": deployment_info["deployment_id"]},
            {"Metric": "Name", "Value": deployment_info["name"]},
            {"Metric": "Environment", "Value": deployment_info["environment"]},
            {"Metric": "Region", "Value": deployment_info["region"]},
            {"Metric": "Instance Type", "Value": deployment_info["instance_type"]},
            {"Metric": "Min Instances", "Value": deployment_info["min_instances"]},
            {"Metric": "Max Instances", "Value": deployment_info["max_instances"]},
            {"Metric": "Desired Instances", "Value": deployment_info["desired_instances"]},
            {"Metric": "Port", "Value": deployment_info["port"]},
            {"Metric": "Domain", "Value": deployment_info["domain"]},
            {"Metric": "Health Status", "Value": "Healthy" if status_data["health_status"] else "Unhealthy"},
            {"Metric": "Uptime", "Value": f"{status_data['uptime_percentage']:.2f}%"}
        ]
        output(info_data, fmt, title=f"Deployment Status: {deployment_id}")

        # Performance metrics are optional; only show when reported.
        if status_data["metrics"]:
            metrics = status_data["metrics"]
            metrics_data = [
                {"Metric": "CPU Usage", "Value": f"{metrics['cpu_usage']:.1f}%"},
                {"Metric": "Memory Usage", "Value": f"{metrics['memory_usage']:.1f}%"},
                {"Metric": "Disk Usage", "Value": f"{metrics['disk_usage']:.1f}%"},
                {"Metric": "Request Count", "Value": metrics['request_count']},
                {"Metric": "Error Rate", "Value": f"{metrics['error_rate']:.2f}%"},
                {"Metric": "Response Time", "Value": f"{metrics['response_time']:.1f}ms"},
                {"Metric": "Active Instances", "Value": metrics['active_instances']}
            ]
            output(metrics_data, fmt, title="Performance Metrics")

        # Recent scaling events, newest first as returned by the backend.
        if status_data["recent_scaling_events"]:
            events = status_data["recent_scaling_events"]
            events_data = [
                {
                    "Event ID": event["event_id"][:8],
                    "Type": event["scaling_type"],
                    "From": event["old_instances"],
                    "To": event["new_instances"],
                    "Reason": event["trigger_reason"],
                    "Success": "Yes" if event["success"] else "No",
                    "Time": event["triggered_at"]
                }
                for event in events
            ]
            output(events_data, fmt, title="Recent Scaling Events")
    except click.Abort:
        # Bug fix: click.Abort subclasses Exception; re-raise so the generic
        # handler below does not print a second, misleading error message.
        raise
    except Exception as e:
        error(f"Error getting deployment status: {str(e)}")
        raise click.Abort()
@deploy.command()
@click.option('--format', type=click.Choice(['table', 'json']), default='table', help='Output format')
@click.pass_context
def overview(ctx, format):
    """Get overview of all deployments.

    Prints cluster-level counts plus (when available) aggregate performance
    metrics averaged across deployments.
    """
    try:
        deployment = ProductionDeployment()
        overview_data = asyncio.run(deployment.get_cluster_overview())
        if not overview_data:
            error("No deployment data available")
            raise click.Abort()

        # NOTE(review): --format is only a fallback here; a context-level
        # output_format still wins. Looks intentional but worth confirming.
        fmt = ctx.obj.get('output_format', format)

        cluster_data = [
            {"Metric": "Total Deployments", "Value": overview_data["total_deployments"]},
            {"Metric": "Running Deployments", "Value": overview_data["running_deployments"]},
            {"Metric": "Total Instances", "Value": overview_data["total_instances"]},
            {"Metric": "Health Check Coverage", "Value": f"{overview_data['health_check_coverage']:.1%}"},
            {"Metric": "Recent Scaling Events", "Value": overview_data["recent_scaling_events"]},
            {"Metric": "Scaling Success Rate", "Value": f"{overview_data['successful_scaling_rate']:.1%}"}
        ]
        output(cluster_data, fmt, title="Cluster Overview")

        if "aggregate_metrics" in overview_data:
            metrics = overview_data["aggregate_metrics"]
            metrics_data = [
                {"Metric": "Average CPU Usage", "Value": f"{metrics['total_cpu_usage']:.1f}%"},
                {"Metric": "Average Memory Usage", "Value": f"{metrics['total_memory_usage']:.1f}%"},
                {"Metric": "Average Disk Usage", "Value": f"{metrics['total_disk_usage']:.1f}%"},
                {"Metric": "Average Response Time", "Value": f"{metrics['average_response_time']:.1f}ms"},
                {"Metric": "Average Error Rate", "Value": f"{metrics['average_error_rate']:.2f}%"},
                {"Metric": "Average Uptime", "Value": f"{metrics['average_uptime']:.1f}%"}
            ]
            output(metrics_data, fmt, title="Aggregate Performance Metrics")
    except click.Abort:
        # Bug fix: click.Abort subclasses Exception; re-raise so the generic
        # handler below does not print a second, misleading error message.
        raise
    except Exception as e:
        error(f"Error getting cluster overview: {str(e)}")
        raise click.Abort()
@deploy.command()
@click.argument('deployment_id')
@click.option('--interval', default=60, help='Update interval in seconds')
@click.pass_context
def monitor(ctx, deployment_id, interval):
    """Monitor deployment performance in real-time"""
    try:
        deployment = ProductionDeployment()
        # Real-time monitoring
        # Rich is imported lazily so the module loads even where rich is
        # only needed for this command.
        from rich.console import Console
        from rich.live import Live
        from rich.table import Table
        import time
        console = Console()

        def generate_monitor_table():
            # Returns either a rich Table with the latest metrics or a plain
            # string describing the error; Live renders both.
            try:
                # NOTE(review): asyncio.run spins up a fresh event loop on
                # every refresh tick — fine for a CLI, but confirm the
                # backend tolerates repeated short-lived loops.
                status_data = asyncio.run(deployment.get_deployment_status(deployment_id))
                if not status_data:
                    return f"Deployment {deployment_id} not found"
                deployment_info = status_data["deployment"]
                metrics = status_data.get("metrics")
                table = Table(title=f"Deployment Monitor - {deployment_info['name']} ({deployment_id[:8]}) - {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
                table.add_column("Metric", style="cyan")
                table.add_column("Value", style="green")
                table.add_row("Environment", deployment_info["environment"])
                table.add_row("Desired Instances", str(deployment_info["desired_instances"]))
                table.add_row("Health Status", "✅ Healthy" if status_data["health_status"] else "❌ Unhealthy")
                table.add_row("Uptime", f"{status_data['uptime_percentage']:.2f}%")
                # Metrics rows only when the backend reported metrics.
                if metrics:
                    table.add_row("CPU Usage", f"{metrics['cpu_usage']:.1f}%")
                    table.add_row("Memory Usage", f"{metrics['memory_usage']:.1f}%")
                    table.add_row("Disk Usage", f"{metrics['disk_usage']:.1f}%")
                    table.add_row("Request Count", str(metrics['request_count']))
                    table.add_row("Error Rate", f"{metrics['error_rate']:.2f}%")
                    table.add_row("Response Time", f"{metrics['response_time']:.1f}ms")
                    table.add_row("Active Instances", str(metrics['active_instances']))
                return table
            except Exception as e:
                return f"Error getting deployment data: {e}"

        # Refresh the display once per --interval seconds until Ctrl-C.
        with Live(generate_monitor_table(), refresh_per_second=1) as live:
            try:
                while True:
                    live.update(generate_monitor_table())
                    time.sleep(interval)
            except KeyboardInterrupt:
                console.print("\n[yellow]Monitoring stopped by user[/yellow]")
    except Exception as e:
        error(f"Error during monitoring: {str(e)}")
        raise click.Abort()
@deploy.command()
@click.argument('deployment_id')
@click.pass_context
def auto_scale(ctx, deployment_id):
    """Trigger auto-scaling evaluation for a deployment.

    Runs the backend's auto-scale evaluation once for DEPLOYMENT_ID and
    reports the outcome; aborts the CLI on failure.
    """
    try:
        deployment = ProductionDeployment()
        evaluated = asyncio.run(deployment.auto_scale_deployment(deployment_id))
        if evaluated:
            success(f"Auto-scaling evaluation completed for deployment {deployment_id}")
        else:
            error(f"Auto-scaling evaluation failed for deployment {deployment_id}")
            raise click.Abort()
    except click.Abort:
        # Bug fix: click.Abort subclasses Exception; re-raise so the generic
        # handler below does not print a second, misleading error message.
        raise
    except Exception as e:
        error(f"Error in auto-scaling: {str(e)}")
        raise click.Abort()
@deploy.command()
@click.option('--format', type=click.Choice(['table', 'json']), default='table', help='Output format')
@click.pass_context
def list_deployments(ctx, format):
    """List all deployments"""
    try:
        deployment = ProductionDeployment()

        # Collect one summary row per known deployment; deployments that
        # return no status are skipped.
        rows = []
        for dep_id in deployment.deployments.keys():
            status_data = asyncio.run(deployment.get_deployment_status(dep_id))
            if not status_data:
                continue
            dep = status_data["deployment"]
            rows.append({
                "Deployment ID": dep["deployment_id"][:8],
                "Name": dep["name"],
                "Environment": dep["environment"],
                "Instances": f"{dep['desired_instances']}/{dep['max_instances']}",
                "Status": "Running" if status_data["health_status"] else "Stopped",
                "Uptime": f"{status_data['uptime_percentage']:.1f}%",
                "Created": dep["created_at"]
            })

        if not rows:
            output("No deployments found", ctx.obj.get('output_format', 'table'))
            return

        output(rows, ctx.obj.get('output_format', format), title="All Deployments")
    except Exception as e:
        error(f"Error listing deployments: {str(e)}")
        raise click.Abort()

View File

@@ -23,7 +23,7 @@ def rates(ctx):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/exchange/rates",
f"{config.coordinator_url}/exchange/rates",
timeout=10
)
@@ -65,7 +65,7 @@ def create_payment(ctx, aitbc_amount: Optional[float], btc_amount: Optional[floa
try:
with httpx.Client() as client:
rates_response = client.get(
f"{config.coordinator_url}/v1/exchange/rates",
f"{config.coordinator_url}/exchange/rates",
timeout=10
)
@@ -94,7 +94,7 @@ def create_payment(ctx, aitbc_amount: Optional[float], btc_amount: Optional[floa
# Create payment
response = client.post(
f"{config.coordinator_url}/v1/exchange/create-payment",
f"{config.coordinator_url}/exchange/create-payment",
json=payment_data,
timeout=10
)
@@ -124,7 +124,7 @@ def payment_status(ctx, payment_id: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/exchange/payment-status/{payment_id}",
f"{config.coordinator_url}/exchange/payment-status/{payment_id}",
timeout=10
)
@@ -158,7 +158,7 @@ def market_stats(ctx):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/exchange/market-stats",
f"{config.coordinator_url}/exchange/market-stats",
timeout=10
)
@@ -187,7 +187,7 @@ def balance(ctx):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/exchange/wallet/balance",
f"{config.coordinator_url}/exchange/wallet/balance",
timeout=10
)
@@ -210,7 +210,7 @@ def info(ctx):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/exchange/wallet/info",
f"{config.coordinator_url}/exchange/wallet/info",
timeout=10
)

View File

@@ -0,0 +1,407 @@
"""Genesis block generation commands for AITBC CLI"""
import click
import json
import yaml
from pathlib import Path
from datetime import datetime
from ..core.genesis_generator import GenesisGenerator, GenesisValidationError
from ..core.config import MultiChainConfig, load_multichain_config
from ..models.chain import GenesisConfig
from ..utils import output, error, success
@click.group()
def genesis():
    """Genesis block generation and management commands"""
    # Parent click group; subcommands attach via @genesis.command().
    pass
@genesis.command()
@click.argument('config_file', type=click.Path(exists=True))
@click.option('--output', '-o', 'output_path', help='Output file path')
@click.option('--template', help='Use predefined template')
@click.option('--format', type=click.Choice(['json', 'yaml']), default='json', help='Output format')
@click.pass_context
def create(ctx, config_file, output_path, template, format):
    """Create genesis block from configuration.

    Bug fix: the --output option parameter was previously named ``output``,
    shadowing the imported ``output()`` helper — by the time the summary
    table was printed the name was always a file-path string, so the
    command crashed every time. The option is now bound to ``output_path``
    (CLI flag unchanged).
    """
    try:
        config = load_multichain_config()
        generator = GenesisGenerator(config)

        if template:
            # Create from template
            genesis_block = generator.create_from_template(template, config_file)
        else:
            # Create from configuration file
            with open(config_file, 'r') as f:
                config_data = yaml.safe_load(f)
            genesis_config = GenesisConfig(**config_data['genesis'])
            genesis_block = generator.create_genesis(genesis_config)

        # Default output name: genesis_<chain>_<timestamp>.<format>
        if output_path is None:
            chain_id = genesis_block.chain_id
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            output_path = f"genesis_{chain_id}_{timestamp}.{format}"

        target = Path(output_path)
        target.parent.mkdir(parents=True, exist_ok=True)
        if format == 'yaml':
            with open(target, 'w') as f:
                yaml.dump(genesis_block.dict(), f, default_flow_style=False, indent=2)
        else:
            with open(target, 'w') as f:
                json.dump(genesis_block.dict(), f, indent=2)

        success("Genesis block created successfully!")
        result = {
            "Chain ID": genesis_block.chain_id,
            "Chain Type": genesis_block.chain_type.value,
            "Purpose": genesis_block.purpose,
            "Name": genesis_block.name,
            "Genesis Hash": genesis_block.hash,
            "Output File": output_path,
            "Format": format
        }
        output(result, ctx.obj.get('output_format', 'table'))

        if genesis_block.privacy.visibility == "private":
            success("Private chain genesis created! Use access codes to invite participants.")
    except GenesisValidationError as e:
        error(f"Genesis validation error: {str(e)}")
        raise click.Abort()
    except Exception as e:
        error(f"Error creating genesis block: {str(e)}")
        raise click.Abort()
@genesis.command()
@click.argument('genesis_file', type=click.Path(exists=True))
@click.pass_context
def validate(ctx, genesis_file):
    """Validate genesis block integrity.

    Loads GENESIS_FILE (YAML or JSON by extension), runs the generator's
    validation, and prints per-check results. Aborts with a non-zero exit
    when validation fails.
    """
    try:
        config = load_multichain_config()
        generator = GenesisGenerator(config)

        # Pick the parser from the file extension; anything that is not
        # .yaml/.yml is treated as JSON.
        genesis_path = Path(genesis_file)
        if genesis_path.suffix.lower() in ['.yaml', '.yml']:
            with open(genesis_path, 'r') as f:
                genesis_data = yaml.safe_load(f)
        else:
            with open(genesis_path, 'r') as f:
                genesis_data = json.load(f)

        from ..models.chain import GenesisBlock
        genesis_block = GenesisBlock(**genesis_data)

        validation_result = generator.validate_genesis(genesis_block)

        fmt = ctx.obj.get('output_format', 'table')
        if validation_result.is_valid:
            success("Genesis block is valid!")
            checks_data = [
                {
                    "Check": check,
                    "Status": "✓ Pass" if passed else "✗ Fail"
                }
                for check, passed in validation_result.checks.items()
            ]
            output(checks_data, fmt, title="Validation Results")
        else:
            error("Genesis block validation failed!")
            errors_data = [
                {
                    "Error": error_msg
                }
                for error_msg in validation_result.errors
            ]
            output(errors_data, fmt, title="Validation Errors")
            failed_checks = [
                {
                    "Check": check,
                    "Status": "✗ Fail"
                }
                for check, passed in validation_result.checks.items()
                if not passed
            ]
            if failed_checks:
                output(failed_checks, fmt, title="Failed Checks")
            raise click.Abort()
    except click.Abort:
        # Bug fix: click.Abort subclasses Exception; without this re-raise
        # the generic handler below printed a second, misleading
        # "Error validating genesis block" message after a clean failure.
        raise
    except Exception as e:
        error(f"Error validating genesis block: {str(e)}")
        raise click.Abort()
@genesis.command()
@click.argument('genesis_file', type=click.Path(exists=True))
@click.pass_context
def info(ctx, genesis_file):
    """Show genesis block information"""
    try:
        config = load_multichain_config()
        generator = GenesisGenerator(config)
        details = generator.get_genesis_info(genesis_file)

        fmt = ctx.obj.get('output_format', 'table')

        # Emit the four info sections in a fixed order.
        sections = [
            ("Genesis Block Information", {
                "Chain ID": details["chain_id"],
                "Chain Type": details["chain_type"],
                "Purpose": details["purpose"],
                "Name": details["name"],
                "Description": details.get("description", "No description"),
                "Created": details["created"],
                "Genesis Hash": details["genesis_hash"],
                "State Root": details["state_root"]
            }),
            ("Configuration Details", {
                "Consensus Algorithm": details["consensus_algorithm"],
                "Block Time": f"{details['block_time']}s",
                "Gas Limit": f"{details['gas_limit']:,}",
                "Gas Price": f"{details['gas_price'] / 1e9:.1f} gwei",
                "Accounts Count": details["accounts_count"],
                "Contracts Count": details["contracts_count"]
            }),
            ("Privacy Settings", {
                "Visibility": details["privacy_visibility"],
                "Access Control": details["access_control"]
            }),
            ("File Information", {
                "File Size": f"{details['file_size']:,} bytes",
                "File Format": details["file_format"]
            }),
        ]
        for title, data in sections:
            output(data, fmt, title=title)
    except Exception as e:
        error(f"Error getting genesis info: {str(e)}")
        raise click.Abort()
# Registered under the CLI name "hash"; the function itself is named
# show_hash so it no longer shadows the builtin hash() at module scope.
@genesis.command(name='hash')
@click.argument('genesis_file', type=click.Path(exists=True))
@click.pass_context
def show_hash(ctx, genesis_file):
    """Calculate genesis hash.

    Prints the generator-computed hash for GENESIS_FILE.
    """
    try:
        config = load_multichain_config()
        generator = GenesisGenerator(config)
        genesis_hash = generator.calculate_genesis_hash(genesis_file)
        result = {
            "Genesis File": genesis_file,
            "Genesis Hash": genesis_hash
        }
        output(result, ctx.obj.get('output_format', 'table'))
    except Exception as e:
        error(f"Error calculating genesis hash: {str(e)}")
        raise click.Abort()
@genesis.command()
@click.option('--format', type=click.Choice(['table', 'json']), default='table', help='Output format')
@click.pass_context
def templates(ctx, format):
    """List available genesis templates"""
    try:
        config = load_multichain_config()
        generator = GenesisGenerator(config)
        available = generator.list_templates()

        if not available:
            output("No templates found", ctx.obj.get('output_format', 'table'))
            return

        if format == 'json':
            # JSON mode: dump the raw template mapping as-is.
            output(available, ctx.obj.get('output_format', 'table'))
            return

        rows = [
            {
                "Template": name,
                "Description": meta["description"],
                "Chain Type": meta["chain_type"],
                "Purpose": meta["purpose"]
            }
            for name, meta in available.items()
        ]
        output(rows, ctx.obj.get('output_format', 'table'), title="Available Templates")
    except Exception as e:
        error(f"Error listing templates: {str(e)}")
        raise click.Abort()
@genesis.command()
@click.argument('template_name')
@click.option('--output', '-o', 'output_path', help='Output file path')
@click.pass_context
def template_info(ctx, template_name, output_path):
    """Show detailed information about a template.

    Bug fix: the --output option parameter was previously named ``output``,
    shadowing the imported ``output()`` helper, so printing the info table
    crashed with "'NoneType' object is not callable" (or "'str' object is
    not callable" when --output was given). The option is now bound to
    ``output_path``; the CLI flag is unchanged. The local that shadowed
    the function's own name is also renamed.
    """
    try:
        config = load_multichain_config()
        generator = GenesisGenerator(config)
        templates = generator.list_templates()

        if template_name not in templates:
            error(f"Template {template_name} not found")
            raise click.Abort()

        meta = templates[template_name]
        info_data = {
            "Template Name": template_name,
            "Description": meta["description"],
            "Chain Type": meta["chain_type"],
            "Purpose": meta["purpose"],
            "File Path": meta["file_path"]
        }
        output(info_data, ctx.obj.get('output_format', 'table'), title=f"Template Information: {template_name}")

        # Optionally copy the template file's content to --output.
        if output_path:
            template_path = Path(meta["file_path"])
            if template_path.exists():
                with open(template_path, 'r') as f:
                    template_content = f.read()
                Path(output_path).write_text(template_content)
                success(f"Template content saved to {output_path}")
    except click.Abort:
        # Re-raise so the generic handler does not print a second message.
        raise
    except Exception as e:
        error(f"Error getting template info: {str(e)}")
        raise click.Abort()
@genesis.command()
@click.argument('chain_id')
@click.option('--format', type=click.Choice(['json', 'yaml']), default='json', help='Export format')
@click.option('--output', '-o', 'output_path', help='Output file path')
@click.pass_context
def export(ctx, chain_id, format, output_path):
    """Export genesis block for a chain.

    Bug fix: the --output option parameter was previously named ``output``,
    shadowing the imported ``output()`` helper, so the stdout branch crashed
    with "'NoneType' object is not callable". The option is now bound to
    ``output_path``; the CLI flag is unchanged.
    """
    try:
        config = load_multichain_config()
        generator = GenesisGenerator(config)
        genesis_data = generator.export_genesis(chain_id, format)

        if output_path:
            target = Path(output_path)
            target.parent.mkdir(parents=True, exist_ok=True)
            if format == 'yaml':
                # export_genesis returns JSON text; convert to YAML on write.
                parsed_data = json.loads(genesis_data)
                with open(target, 'w') as f:
                    yaml.dump(parsed_data, f, default_flow_style=False, indent=2)
            else:
                target.write_text(genesis_data)
            success(f"Genesis block exported to {output_path}")
        else:
            # No file given: print to stdout via the output helper.
            if format == 'yaml':
                parsed_data = json.loads(genesis_data)
                output(yaml.dump(parsed_data, default_flow_style=False, indent=2),
                       ctx.obj.get('output_format', 'table'))
            else:
                output(genesis_data, ctx.obj.get('output_format', 'table'))
    except Exception as e:
        error(f"Error exporting genesis block: {str(e)}")
        raise click.Abort()
@genesis.command()
@click.argument('template_name')
@click.argument('output_file')
@click.option('--format', type=click.Choice(['json', 'yaml']), default='yaml', help='Output format')
@click.pass_context
def create_template(ctx, template_name, output_file, format):
    """Create a new genesis template"""
    try:
        # Skeleton genesis definition the new template starts from.
        genesis_section = {
            "chain_type": "topic",
            "purpose": template_name,
            "name": f"{template_name.title()} Chain",
            "description": f"A {template_name} chain for AITBC",
            "consensus": {
                "algorithm": "pos",
                "block_time": 5,
                "max_validators": 100,
                "authorities": []
            },
            "privacy": {
                "visibility": "public",
                "access_control": "open",
                "require_invitation": False
            },
            "parameters": {
                "max_block_size": 1048576,
                "max_gas_per_block": 10000000,
                "min_gas_price": 1000000000,
                "block_reward": "2000000000000000000"
            },
            "accounts": [],
            "contracts": []
        }
        template_data = {
            "description": f"Genesis template for {template_name}",
            "genesis": genesis_section
        }

        target = Path(output_file)
        target.parent.mkdir(parents=True, exist_ok=True)
        with open(target, 'w') as f:
            if format == 'yaml':
                yaml.dump(template_data, f, default_flow_style=False, indent=2)
            else:
                json.dump(template_data, f, indent=2)

        success(f"Template created: {output_file}")
        result = {
            "Template Name": template_name,
            "Output File": output_file,
            "Format": format,
            "Chain Type": genesis_section["chain_type"],
            "Purpose": genesis_section["purpose"]
        }
        output(result, ctx.obj.get('output_format', 'table'))
    except Exception as e:
        error(f"Error creating template: {str(e)}")
        raise click.Abort()

View File

@@ -51,7 +51,7 @@ def register(ctx, name: str, memory: Optional[int], cuda_cores: Optional[int],
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/marketplace/gpu/register",
f"{config.coordinator_url}/marketplace/gpu/register",
headers={
"Content-Type": "application/json",
"X-Api-Key": config.api_key or "",
@@ -96,7 +96,7 @@ def list(ctx, available: bool, model: Optional[str], memory_min: Optional[int],
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/marketplace/gpu/list",
f"{config.coordinator_url}/marketplace/gpu/list",
params=params,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -120,7 +120,7 @@ def details(ctx, gpu_id: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/marketplace/gpu/{gpu_id}",
f"{config.coordinator_url}/marketplace/gpu/{gpu_id}",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -152,7 +152,7 @@ def book(ctx, gpu_id: str, hours: float, job_id: Optional[str]):
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/marketplace/gpu/{gpu_id}/book",
f"{config.coordinator_url}/marketplace/gpu/{gpu_id}/book",
headers={
"Content-Type": "application/json",
"X-Api-Key": config.api_key or ""
@@ -180,7 +180,7 @@ def release(ctx, gpu_id: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/marketplace/gpu/{gpu_id}/release",
f"{config.coordinator_url}/marketplace/gpu/{gpu_id}/release",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -208,7 +208,7 @@ def orders(ctx, status: Optional[str], limit: int):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/marketplace/orders",
f"{config.coordinator_url}/marketplace/orders",
params=params,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -232,7 +232,7 @@ def pricing(ctx, model: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/marketplace/pricing/{model}",
f"{config.coordinator_url}/marketplace/pricing/{model}",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -256,7 +256,7 @@ def reviews(ctx, gpu_id: str, limit: int):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/marketplace/gpu/{gpu_id}/reviews",
f"{config.coordinator_url}/marketplace/gpu/{gpu_id}/reviews",
params={"limit": limit},
headers={"X-Api-Key": config.api_key or ""}
)
@@ -291,7 +291,7 @@ def review(ctx, gpu_id: str, rating: int, comment: Optional[str]):
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/marketplace/gpu/{gpu_id}/reviews",
f"{config.coordinator_url}/marketplace/gpu/{gpu_id}/reviews",
headers={
"Content-Type": "application/json",
"X-Api-Key": config.api_key or ""
@@ -344,7 +344,7 @@ def submit(ctx, provider: str, capacity: int, price: float, notes: Optional[str]
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/marketplace/bids",
f"{config.coordinator_url}/marketplace/bids",
headers={
"Content-Type": "application/json",
"X-Api-Key": config.api_key or ""
@@ -383,7 +383,7 @@ def list(ctx, status: Optional[str], provider: Optional[str], limit: int):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/marketplace/bids",
f"{config.coordinator_url}/marketplace/bids",
params=params,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -407,7 +407,7 @@ def details(ctx, bid_id: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/marketplace/bids/{bid_id}",
f"{config.coordinator_url}/marketplace/bids/{bid_id}",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -455,7 +455,7 @@ def list(ctx, status: Optional[str], gpu_model: Optional[str], price_max: Option
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/marketplace/offers",
f"{config.coordinator_url}/marketplace/offers",
params=params,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -499,7 +499,7 @@ def register(ctx, agent_id: str, agent_type: str, capabilities: Optional[str],
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/agents/register",
f"{config.coordinator_url}/agents/register",
json=agent_data,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -538,7 +538,7 @@ def list_agents(ctx, agent_id: Optional[str], agent_type: Optional[str],
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/agents",
f"{config.coordinator_url}/agents",
params=params,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -578,7 +578,7 @@ def list_resource(ctx, resource_id: str, resource_type: str, compute_power: floa
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/marketplace/list",
f"{config.coordinator_url}/marketplace/list",
json=resource_data,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -617,7 +617,7 @@ def rent(ctx, resource_id: str, consumer_id: str, duration: int, max_price: Opti
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/marketplace/rent",
f"{config.coordinator_url}/marketplace/rent",
json=rental_data,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -656,7 +656,7 @@ def execute_contract(ctx, contract_type: str, params: str, gas_limit: int):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/blockchain/contracts/execute",
f"{config.coordinator_url}/blockchain/contracts/execute",
json=contract_data,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -691,7 +691,7 @@ def pay(ctx, from_agent: str, to_agent: str, amount: float, payment_type: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/payments/process",
f"{config.coordinator_url}/payments/process",
json=payment_data,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -715,7 +715,7 @@ def reputation(ctx, agent_id: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/agents/{agent_id}/reputation",
f"{config.coordinator_url}/agents/{agent_id}/reputation",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -737,7 +737,7 @@ def balance(ctx, agent_id: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/agents/{agent_id}/balance",
f"{config.coordinator_url}/agents/{agent_id}/balance",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -759,7 +759,7 @@ def analytics(ctx, time_range: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/analytics/marketplace",
f"{config.coordinator_url}/analytics/marketplace",
params={"time_range": time_range},
headers={"X-Api-Key": config.api_key or ""}
)
@@ -808,7 +808,7 @@ def create_proposal(ctx, title: str, description: str, proposal_type: str,
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/proposals/create",
f"{config.coordinator_url}/proposals/create",
json=proposal_data,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -840,7 +840,7 @@ def vote(ctx, proposal_id: str, vote: str, reasoning: Optional[str]):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/voting/cast-vote",
f"{config.coordinator_url}/voting/cast-vote",
json=vote_data,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -869,7 +869,7 @@ def list_proposals(ctx, status: Optional[str], limit: int):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/proposals",
f"{config.coordinator_url}/proposals",
params=params,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -908,7 +908,7 @@ def load(ctx, concurrent_users: int, rps: int, duration: int):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/testing/load-test",
f"{config.coordinator_url}/testing/load-test",
json=test_config,
headers={"X-Api-Key": config.api_key or ""}
)

View File

@@ -47,7 +47,7 @@ def list(ctx, nft_version: str, category: Optional[str], tags: Optional[str],
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/marketplace/advanced/models",
f"{config.coordinator_url}/marketplace/advanced/models",
headers={"X-Api-Key": config.api_key or ""},
params=params
)
@@ -105,7 +105,7 @@ def mint(ctx, model_file: str, metadata, price: Optional[float], royalty: float,
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/marketplace/advanced/models/mint",
f"{config.coordinator_url}/marketplace/advanced/models/mint",
headers={"X-Api-Key": config.api_key or ""},
data=nft_data,
files=files
@@ -157,7 +157,7 @@ def update(ctx, nft_id: str, new_version: str, version_notes: str, compatibility
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/marketplace/advanced/models/{nft_id}/update",
f"{config.coordinator_url}/marketplace/advanced/models/{nft_id}/update",
headers={"X-Api-Key": config.api_key or ""},
data=update_data,
files=files
@@ -196,7 +196,7 @@ def verify(ctx, nft_id: str, deep_scan: bool, check_integrity: bool, verify_perf
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/marketplace/advanced/models/{nft_id}/verify",
f"{config.coordinator_url}/marketplace/advanced/models/{nft_id}/verify",
headers={"X-Api-Key": config.api_key or ""},
json=verify_data
)
@@ -253,7 +253,7 @@ def analytics(ctx, period: str, metrics: str, category: Optional[str], output_fo
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/marketplace/advanced/analytics",
f"{config.coordinator_url}/marketplace/advanced/analytics",
headers={"X-Api-Key": config.api_key or ""},
params=params
)
@@ -295,7 +295,7 @@ def benchmark(ctx, model_id: str, competitors: bool, datasets: str, iterations:
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/marketplace/advanced/models/{model_id}/benchmark",
f"{config.coordinator_url}/marketplace/advanced/models/{model_id}/benchmark",
headers={"X-Api-Key": config.api_key or ""},
json=benchmark_data
)
@@ -334,7 +334,7 @@ def trends(ctx, category: Optional[str], forecast: str, confidence: float):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/marketplace/advanced/trends",
f"{config.coordinator_url}/marketplace/advanced/trends",
headers={"X-Api-Key": config.api_key or ""},
params=params
)
@@ -371,7 +371,7 @@ def report(ctx, format: str, email: Optional[str], sections: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/marketplace/advanced/reports/generate",
f"{config.coordinator_url}/marketplace/advanced/reports/generate",
headers={"X-Api-Key": config.api_key or ""},
json=report_data
)
@@ -420,7 +420,7 @@ def bid(ctx, auction_id: str, amount: float, max_auto_bid: Optional[float], prox
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/marketplace/advanced/auctions/{auction_id}/bid",
f"{config.coordinator_url}/marketplace/advanced/auctions/{auction_id}/bid",
headers={"X-Api-Key": config.api_key or ""},
json=bid_data
)
@@ -466,7 +466,7 @@ def royalties(ctx, model_id: str, recipients: str, smart_contract: bool):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/marketplace/advanced/models/{model_id}/royalties",
f"{config.coordinator_url}/marketplace/advanced/models/{model_id}/royalties",
headers={"X-Api-Key": config.api_key or ""},
json=royalty_data
)
@@ -569,7 +569,7 @@ def file(ctx, transaction_id: str, reason: str, evidence, category: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/marketplace/advanced/disputes",
f"{config.coordinator_url}/marketplace/advanced/disputes",
headers={"X-Api-Key": config.api_key or ""},
data=dispute_data,
files=files
@@ -599,7 +599,7 @@ def status(ctx, dispute_id: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/marketplace/advanced/disputes/{dispute_id}",
f"{config.coordinator_url}/marketplace/advanced/disputes/{dispute_id}",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -634,7 +634,7 @@ def resolve(ctx, dispute_id: str, resolution: str, evidence):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/marketplace/advanced/disputes/{dispute_id}/resolve",
f"{config.coordinator_url}/marketplace/advanced/disputes/{dispute_id}/resolve",
headers={"X-Api-Key": config.api_key or ""},
data=resolution_data,
files=files

View File

@@ -0,0 +1,494 @@
"""Global chain marketplace commands for AITBC CLI"""
import click
import asyncio
import json
from decimal import Decimal
from datetime import datetime
from typing import Optional
from ..core.config import load_multichain_config
from ..core.marketplace import (
GlobalChainMarketplace, ChainType, MarketplaceStatus,
TransactionStatus
)
from ..utils import output, error, success
@click.group()
def marketplace():
    """Global chain marketplace commands"""
@marketplace.command()
@click.argument('chain_id')
@click.argument('chain_name')
@click.argument('chain_type')
@click.argument('description')
@click.argument('seller_id')
@click.argument('price')
@click.option('--currency', default='ETH', help='Currency for pricing')
@click.option('--specs', help='Chain specifications (JSON string)')
@click.option('--metadata', help='Additional metadata (JSON string)')
@click.pass_context
def list(ctx, chain_id, chain_name, chain_type, description, seller_id, price, currency, specs, metadata):
    """List a chain for sale in the marketplace"""
    try:
        config = load_multichain_config()
        marketplace = GlobalChainMarketplace(config)
        # Validate the chain type against the ChainType enum.
        try:
            chain_type_enum = ChainType(chain_type)
        except ValueError:
            error(f"Invalid chain type: {chain_type}")
            error(f"Valid types: {[t.value for t in ChainType]}")
            raise click.Abort()
        # Parse the price. Catch only conversion errors -- the previous bare
        # `except:` also swallowed SystemExit/KeyboardInterrupt.
        # Decimal raises decimal.InvalidOperation (an ArithmeticError) for
        # malformed strings and TypeError for unsupported argument types.
        try:
            price_decimal = Decimal(price)
        except (ValueError, TypeError, ArithmeticError):
            error("Invalid price format")
            raise click.Abort()
        # Optional JSON-encoded chain specifications.
        chain_specs = {}
        if specs:
            try:
                chain_specs = json.loads(specs)
            except json.JSONDecodeError:
                error("Invalid JSON specifications")
                raise click.Abort()
        # Optional JSON-encoded metadata.
        metadata_dict = {}
        if metadata:
            try:
                metadata_dict = json.loads(metadata)
            except json.JSONDecodeError:
                error("Invalid JSON metadata")
                raise click.Abort()
        # Create the listing via the async marketplace API.
        listing_id = asyncio.run(marketplace.create_listing(
            chain_id, chain_name, chain_type_enum, description,
            seller_id, price_decimal, currency, chain_specs, metadata_dict
        ))
        if listing_id:
            success(f"Chain listed successfully! Listing ID: {listing_id}")
            listing_data = {
                "Listing ID": listing_id,
                "Chain ID": chain_id,
                "Chain Name": chain_name,
                "Type": chain_type,
                "Price": f"{price} {currency}",
                "Seller": seller_id,
                "Status": "active",
                "Created": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            }
            output(listing_data, ctx.obj.get('output_format', 'table'))
        else:
            error("Failed to create listing")
            raise click.Abort()
    except click.Abort:
        # Intentional aborts already reported their own message; re-raise them
        # unchanged instead of double-reporting through the handler below.
        raise
    except Exception as e:
        error(f"Error creating listing: {str(e)}")
        raise click.Abort()
@marketplace.command()
@click.argument('listing_id')
@click.argument('buyer_id')
@click.option('--payment', default='crypto', help='Payment method')
@click.pass_context
def buy(ctx, listing_id, buyer_id, payment):
    """Purchase a chain from the marketplace"""
    try:
        config = load_multichain_config()
        marketplace = GlobalChainMarketplace(config)
        # Kick off the purchase; a falsy transaction id means it failed.
        tx_id = asyncio.run(marketplace.purchase_chain(listing_id, buyer_id, payment))
        if not tx_id:
            error("Failed to purchase chain")
            raise click.Abort()
        success(f"Purchase initiated! Transaction ID: {tx_id}")
        # Echo the pending transaction back to the user.
        details = {
            "Transaction ID": tx_id,
            "Listing ID": listing_id,
            "Buyer": buyer_id,
            "Payment Method": payment,
            "Status": "pending",
            "Created": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        }
        output(details, ctx.obj.get('output_format', 'table'))
    except Exception as e:
        error(f"Error purchasing chain: {str(e)}")
        raise click.Abort()
@marketplace.command()
@click.argument('transaction_id')
@click.argument('transaction_hash')
@click.pass_context
def complete(ctx, transaction_id, transaction_hash):
    """Complete a marketplace transaction"""
    try:
        config = load_multichain_config()
        marketplace = GlobalChainMarketplace(config)
        # BUG FIX: the result was previously bound to a local named `success`,
        # shadowing the imported success() helper -- the success(...) call
        # below then raised "TypeError: 'bool' object is not callable".
        completed = asyncio.run(marketplace.complete_transaction(transaction_id, transaction_hash))
        if completed:
            success(f"Transaction {transaction_id} completed successfully!")
            transaction_data = {
                "Transaction ID": transaction_id,
                "Transaction Hash": transaction_hash,
                "Status": "completed",
                "Completed": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            }
            output(transaction_data, ctx.obj.get('output_format', 'table'))
        else:
            error(f"Failed to complete transaction {transaction_id}")
            raise click.Abort()
    except click.Abort:
        # Re-raise intentional aborts without wrapping them in a second error.
        raise
    except Exception as e:
        error(f"Error completing transaction: {str(e)}")
        raise click.Abort()
@marketplace.command()
@click.option('--type', help='Filter by chain type')
@click.option('--min-price', help='Minimum price')
@click.option('--max-price', help='Maximum price')
@click.option('--seller', help='Filter by seller ID')
@click.option('--status', help='Filter by listing status')
@click.option('--format', type=click.Choice(['table', 'json']), default='table', help='Output format')
@click.pass_context
def search(ctx, type, min_price, max_price, seller, status, format):
    """Search chain listings in the marketplace"""
    try:
        config = load_multichain_config()
        marketplace = GlobalChainMarketplace(config)
        # Validate the optional chain-type filter.
        chain_type = None
        if type:
            try:
                chain_type = ChainType(type)
            except ValueError:
                error(f"Invalid chain type: {type}")
                raise click.Abort()
        # Parse the optional price bounds. Catch only conversion errors --
        # the previous bare `except:` clauses also swallowed SystemExit and
        # KeyboardInterrupt. Decimal raises decimal.InvalidOperation (an
        # ArithmeticError) for malformed strings.
        min_price_dec = None
        if min_price:
            try:
                min_price_dec = Decimal(min_price)
            except (ValueError, TypeError, ArithmeticError):
                error("Invalid minimum price format")
                raise click.Abort()
        max_price_dec = None
        if max_price:
            try:
                max_price_dec = Decimal(max_price)
            except (ValueError, TypeError, ArithmeticError):
                error("Invalid maximum price format")
                raise click.Abort()
        # Validate the optional listing-status filter.
        listing_status = None
        if status:
            try:
                listing_status = MarketplaceStatus(status)
            except ValueError:
                error(f"Invalid status: {status}")
                raise click.Abort()
        # Run the search with whichever filters were supplied.
        listings = asyncio.run(marketplace.search_listings(
            chain_type, min_price_dec, max_price_dec, seller, listing_status
        ))
        if not listings:
            output("No listings found matching your criteria", ctx.obj.get('output_format', 'table'))
            return
        # One row per listing for the table/json renderer.
        listing_data = [
            {
                "Listing ID": listing.listing_id,
                "Chain ID": listing.chain_id,
                "Chain Name": listing.chain_name,
                "Type": listing.chain_type.value,
                "Price": f"{listing.price} {listing.currency}",
                "Seller": listing.seller_id,
                "Status": listing.status.value,
                "Created": listing.created_at.strftime("%Y-%m-%d %H:%M:%S"),
                "Expires": listing.expires_at.strftime("%Y-%m-%d %H:%M:%S")
            }
            for listing in listings
        ]
        output(listing_data, ctx.obj.get('output_format', format), title="Marketplace Listings")
    except click.Abort:
        # Intentional aborts already printed their own message.
        raise
    except Exception as e:
        error(f"Error searching listings: {str(e)}")
        raise click.Abort()
@marketplace.command()
@click.argument('chain_id')
@click.option('--format', type=click.Choice(['table', 'json']), default='table', help='Output format')
@click.pass_context
def economy(ctx, chain_id, format):
    """Get economic metrics for a specific chain"""
    try:
        config = load_multichain_config()
        marketplace = GlobalChainMarketplace(config)
        # Fetch the per-chain economic snapshot.
        econ = asyncio.run(marketplace.get_chain_economy(chain_id))
        if not econ:
            error(f"No economic data available for chain {chain_id}")
            raise click.Abort()
        # Label/value pairs, rendered below as Metric/Value rows.
        rows = [
            ("Chain ID", econ.chain_id),
            ("Total Value Locked", f"{econ.total_value_locked} ETH"),
            ("Daily Volume", f"{econ.daily_volume} ETH"),
            ("Market Cap", f"{econ.market_cap} ETH"),
            ("Transaction Count", econ.transaction_count),
            ("Active Users", econ.active_users),
            ("Agent Count", econ.agent_count),
            ("Governance Tokens", f"{econ.governance_tokens}"),
            ("Staking Rewards", f"{econ.staking_rewards}"),
            ("Last Updated", econ.last_updated.strftime("%Y-%m-%d %H:%M:%S")),
        ]
        economy_data = [{"Metric": name, "Value": value} for name, value in rows]
        output(economy_data, ctx.obj.get('output_format', format), title=f"Chain Economy: {chain_id}")
    except Exception as e:
        error(f"Error getting chain economy: {str(e)}")
        raise click.Abort()
@marketplace.command()
@click.argument('user_id')
@click.option('--role', type=click.Choice(['buyer', 'seller', 'both']), default='both', help='User role')
@click.option('--format', type=click.Choice(['table', 'json']), default='table', help='Output format')
@click.pass_context
def transactions(ctx, user_id, role, format):
    """Get transactions for a specific user"""
    try:
        config = load_multichain_config()
        marketplace = GlobalChainMarketplace(config)
        # Fetch the user's transactions, optionally filtered by role.
        txs = asyncio.run(marketplace.get_user_transactions(user_id, role))
        if not txs:
            output(f"No transactions found for user {user_id}", ctx.obj.get('output_format', 'table'))
            return
        # Build one display row per transaction; the user's role in each
        # transaction is derived from whether they were the buyer.
        rows = []
        for tx in txs:
            is_buyer = tx.buyer_id == user_id
            rows.append({
                "Transaction ID": tx.transaction_id,
                "Listing ID": tx.listing_id,
                "Chain ID": tx.chain_id,
                "Price": f"{tx.price} {tx.currency}",
                "Role": "buyer" if is_buyer else "seller",
                "Counterparty": tx.seller_id if is_buyer else tx.buyer_id,
                "Status": tx.status.value,
                "Created": tx.created_at.strftime("%Y-%m-%d %H:%M:%S"),
                "Completed": tx.completed_at.strftime("%Y-%m-%d %H:%M:%S") if tx.completed_at else "N/A",
            })
        output(rows, ctx.obj.get('output_format', format), title=f"Transactions for {user_id}")
    except Exception as e:
        error(f"Error getting user transactions: {str(e)}")
        raise click.Abort()
@marketplace.command()
@click.option('--format', type=click.Choice(['table', 'json']), default='table', help='Output format')
@click.pass_context
def overview(ctx, format):
    """Get comprehensive marketplace overview"""
    try:
        config = load_multichain_config()
        marketplace = GlobalChainMarketplace(config)
        # Single aggregated snapshot; every section below is optional.
        ov = asyncio.run(marketplace.get_marketplace_overview())
        if not ov:
            error("No marketplace data available")
            raise click.Abort()
        fmt = ctx.obj.get('output_format', format)

        def emit(rows, title):
            # Shared renderer for each section table.
            output(rows, fmt, title=title)

        # Core marketplace metrics.
        if "marketplace_metrics" in ov:
            m = ov["marketplace_metrics"]
            emit([
                {"Metric": "Total Listings", "Value": m["total_listings"]},
                {"Metric": "Active Listings", "Value": m["active_listings"]},
                {"Metric": "Total Transactions", "Value": m["total_transactions"]},
                {"Metric": "Total Volume", "Value": f"{m['total_volume']} ETH"},
                {"Metric": "Average Price", "Value": f"{m['average_price']} ETH"},
                {"Metric": "Market Sentiment", "Value": f"{m['market_sentiment']:.2f}"},
            ], "Marketplace Metrics")
        # Rolling 24-hour volume.
        if "volume_24h" in ov:
            emit([{"Metric": "24h Volume", "Value": f"{ov['volume_24h']} ETH"}], "24-Hour Volume")
        # Best chains by volume, capped at five rows.
        if "top_performing_chains" in ov:
            top = ov["top_performing_chains"]
            if top:
                emit([
                    {
                        "Chain ID": c["chain_id"],
                        "Volume": f"{c['volume']} ETH",
                        "Transactions": c["transactions"],
                    }
                    for c in top[:5]  # Top 5
                ], "Top Performing Chains")
        # Listing counts per chain type.
        if "chain_types_distribution" in ov:
            dist = ov["chain_types_distribution"]
            if dist:
                emit([{"Chain Type": kind, "Count": count} for kind, count in dist.items()],
                     "Chain Types Distribution")
        # Buyer/seller activity over the last 7 days.
        if "user_activity" in ov:
            act = ov["user_activity"]
            emit([
                {"Metric": "Active Buyers (7d)", "Value": act["active_buyers_7d"]},
                {"Metric": "Active Sellers (7d)", "Value": act["active_sellers_7d"]},
                {"Metric": "Total Unique Users", "Value": act["total_unique_users"]},
                {"Metric": "Average Reputation", "Value": f"{act['average_reputation']:.3f}"},
            ], "User Activity")
        # Escrow holdings and fees.
        if "escrow_summary" in ov:
            esc = ov["escrow_summary"]
            emit([
                {"Metric": "Active Escrows", "Value": esc["active_escrows"]},
                {"Metric": "Released Escrows", "Value": esc["released_escrows"]},
                {"Metric": "Total Escrow Value", "Value": f"{esc['total_escrow_value']} ETH"},
                {"Metric": "Escrow Fees Collected", "Value": f"{esc['escrow_fee_collected']} ETH"},
            ], "Escrow Summary")
    except Exception as e:
        error(f"Error getting marketplace overview: {str(e)}")
        raise click.Abort()
@marketplace.command()
@click.option('--realtime', is_flag=True, help='Real-time monitoring')
@click.option('--interval', default=30, help='Update interval in seconds')
@click.pass_context
def monitor(ctx, realtime, interval):
    """Monitor marketplace activity"""
    # With --realtime, renders a live-updating rich table until Ctrl-C;
    # otherwise prints a single snapshot through the normal output() path.
    try:
        config = load_multichain_config()
        marketplace = GlobalChainMarketplace(config)
        if realtime:
            # Real-time monitoring: import rich lazily so the dependency is
            # only needed when this mode is actually used.
            from rich.console import Console
            from rich.live import Live
            from rich.table import Table
            import time
            console = Console()
            def generate_monitor_table():
                # Build a fresh table from the current overview. Returns a
                # plain error string (renderable by Live) if the fetch fails,
                # so one bad poll does not kill the monitoring loop.
                try:
                    overview = asyncio.run(marketplace.get_marketplace_overview())
                    table = Table(title=f"Marketplace Monitor - {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
                    table.add_column("Metric", style="cyan")
                    table.add_column("Value", style="green")
                    if "marketplace_metrics" in overview:
                        metrics = overview["marketplace_metrics"]
                        table.add_row("Total Listings", str(metrics["total_listings"]))
                        table.add_row("Active Listings", str(metrics["active_listings"]))
                        table.add_row("Total Transactions", str(metrics["total_transactions"]))
                        table.add_row("Total Volume", f"{metrics['total_volume']} ETH")
                        table.add_row("Market Sentiment", f"{metrics['market_sentiment']:.2f}")
                    if "volume_24h" in overview:
                        table.add_row("24h Volume", f"{overview['volume_24h']} ETH")
                    if "user_activity" in overview:
                        activity = overview["user_activity"]
                        # Buyers + sellers combined into one activity figure.
                        table.add_row("Active Users (7d)", str(activity["active_buyers_7d"] + activity["active_sellers_7d"]))
                    return table
                except Exception as e:
                    return f"Error getting marketplace data: {e}"
            # Redraw every `interval` seconds until the user interrupts.
            with Live(generate_monitor_table(), refresh_per_second=1) as live:
                try:
                    while True:
                        live.update(generate_monitor_table())
                        time.sleep(interval)
                except KeyboardInterrupt:
                    console.print("\n[yellow]Monitoring stopped by user[/yellow]")
        else:
            # Single snapshot: same metrics, rendered once via output().
            overview = asyncio.run(marketplace.get_marketplace_overview())
            monitor_data = []
            if "marketplace_metrics" in overview:
                metrics = overview["marketplace_metrics"]
                monitor_data.extend([
                    {"Metric": "Total Listings", "Value": metrics["total_listings"]},
                    {"Metric": "Active Listings", "Value": metrics["active_listings"]},
                    {"Metric": "Total Transactions", "Value": metrics["total_transactions"]},
                    {"Metric": "Total Volume", "Value": f"{metrics['total_volume']} ETH"},
                    {"Metric": "Market Sentiment", "Value": f"{metrics['market_sentiment']:.2f}"}
                ])
            if "volume_24h" in overview:
                monitor_data.append({"Metric": "24h Volume", "Value": f"{overview['volume_24h']} ETH"})
            if "user_activity" in overview:
                activity = overview["user_activity"]
                monitor_data.append({"Metric": "Active Users (7d)", "Value": activity["active_buyers_7d"] + activity["active_sellers_7d"]})
            output(monitor_data, ctx.obj.get('output_format', 'table'), title="Marketplace Monitor")
    except Exception as e:
        error(f"Error during monitoring: {str(e)}")
        raise click.Abort()

View File

@@ -49,7 +49,7 @@ def register(ctx, gpu: Optional[str], memory: Optional[int],
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/miners/register?miner_id={miner_id}",
f"{config.coordinator_url}/miners/register?miner_id={miner_id}",
headers={
"Content-Type": "application/json",
"X-Api-Key": config.api_key or ""
@@ -80,7 +80,7 @@ def poll(ctx, wait: int, miner_id: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/miners/poll",
f"{config.coordinator_url}/miners/poll",
headers={
"X-Api-Key": config.api_key or "",
"X-Miner-ID": miner_id
@@ -116,7 +116,7 @@ def mine(ctx, jobs: int, miner_id: str):
with httpx.Client() as client:
# Poll for job
response = client.get(
f"{config.coordinator_url}/v1/miners/poll",
f"{config.coordinator_url}/miners/poll",
headers={
"X-Api-Key": config.api_key or "",
"X-Miner-ID": miner_id
@@ -139,7 +139,7 @@ def mine(ctx, jobs: int, miner_id: str):
# Submit result
result_response = client.post(
f"{config.coordinator_url}/v1/miners/{job_id}/result",
f"{config.coordinator_url}/miners/{job_id}/result",
headers={
"Content-Type": "application/json",
"X-Api-Key": config.api_key or "",
@@ -183,7 +183,7 @@ def heartbeat(ctx, miner_id: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/miners/heartbeat?miner_id={miner_id}",
f"{config.coordinator_url}/miners/heartbeat?miner_id={miner_id}",
headers={
"X-Api-Key": config.api_key or ""
}
@@ -235,7 +235,7 @@ def earnings(ctx, miner_id: str, from_time: Optional[str], to_time: Optional[str
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/miners/{miner_id}/earnings",
f"{config.coordinator_url}/miners/{miner_id}/earnings",
params=params,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -281,7 +281,7 @@ def update_capabilities(ctx, gpu: Optional[str], memory: Optional[int],
try:
with httpx.Client() as client:
response = client.put(
f"{config.coordinator_url}/v1/miners/{miner_id}/capabilities",
f"{config.coordinator_url}/miners/{miner_id}/capabilities",
headers={
"Content-Type": "application/json",
"X-Api-Key": config.api_key or ""
@@ -319,7 +319,7 @@ def deregister(ctx, miner_id: str, force: bool):
try:
with httpx.Client() as client:
response = client.delete(
f"{config.coordinator_url}/v1/miners/{miner_id}",
f"{config.coordinator_url}/miners/{miner_id}",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -359,7 +359,7 @@ def jobs(ctx, limit: int, job_type: Optional[str], min_reward: Optional[float],
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/miners/{miner_id}/jobs",
f"{config.coordinator_url}/miners/{miner_id}/jobs",
params=params,
headers={"X-Api-Key": config.api_key or ""}
)
@@ -380,7 +380,7 @@ def _process_single_job(config, miner_id: str, worker_id: int) -> Dict[str, Any]
try:
with httpx.Client() as http_client:
response = http_client.get(
f"{config.coordinator_url}/v1/miners/poll",
f"{config.coordinator_url}/miners/poll",
headers={
"X-Api-Key": config.api_key or "",
"X-Miner-ID": miner_id
@@ -395,7 +395,7 @@ def _process_single_job(config, miner_id: str, worker_id: int) -> Dict[str, Any]
time.sleep(2) # Simulate processing
result_response = http_client.post(
f"{config.coordinator_url}/v1/miners/{job_id}/result",
f"{config.coordinator_url}/miners/{job_id}/result",
headers={
"Content-Type": "application/json",
"X-Api-Key": config.api_key or "",

View File

@@ -41,7 +41,7 @@ def dashboard(ctx, refresh: int, duration: int):
# Node status
try:
resp = client.get(
f"{config.coordinator_url}/v1/status",
f"{config.coordinator_url}/status",
headers={"X-Api-Key": config.api_key or ""}
)
if resp.status_code == 200:
@@ -59,7 +59,7 @@ def dashboard(ctx, refresh: int, duration: int):
# Jobs summary
try:
resp = client.get(
f"{config.coordinator_url}/v1/jobs",
f"{config.coordinator_url}/jobs",
headers={"X-Api-Key": config.api_key or ""},
params={"limit": 5}
)
@@ -78,7 +78,7 @@ def dashboard(ctx, refresh: int, duration: int):
# Miners summary
try:
resp = client.get(
f"{config.coordinator_url}/v1/miners",
f"{config.coordinator_url}/miners",
headers={"X-Api-Key": config.api_key or ""}
)
if resp.status_code == 200:
@@ -128,7 +128,7 @@ def metrics(ctx, period: str, export_path: Optional[str]):
# Coordinator metrics
try:
resp = client.get(
f"{config.coordinator_url}/v1/status",
f"{config.coordinator_url}/status",
headers={"X-Api-Key": config.api_key or ""}
)
if resp.status_code == 200:
@@ -142,7 +142,7 @@ def metrics(ctx, period: str, export_path: Optional[str]):
# Job metrics
try:
resp = client.get(
f"{config.coordinator_url}/v1/jobs",
f"{config.coordinator_url}/jobs",
headers={"X-Api-Key": config.api_key or ""},
params={"limit": 100}
)
@@ -161,7 +161,7 @@ def metrics(ctx, period: str, export_path: Optional[str]):
# Miner metrics
try:
resp = client.get(
f"{config.coordinator_url}/v1/miners",
f"{config.coordinator_url}/miners",
headers={"X-Api-Key": config.api_key or ""}
)
if resp.status_code == 200:
@@ -287,7 +287,7 @@ def history(ctx, period: str):
with httpx.Client(timeout=10) as client:
try:
resp = client.get(
f"{config.coordinator_url}/v1/jobs",
f"{config.coordinator_url}/jobs",
headers={"X-Api-Key": config.api_key or ""},
params={"limit": 500}
)

View File

@@ -48,7 +48,7 @@ def agent(ctx, name: str, modalities: str, description: str, model_config, gpu_a
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/multimodal/agents",
f"{config.coordinator_url}/multimodal/agents",
headers={"X-Api-Key": config.api_key or ""},
json=agent_data
)
@@ -138,7 +138,7 @@ def process(ctx, agent_id: str, text: Optional[str], image: Optional[str],
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/multimodal/agents/{agent_id}/process",
f"{config.coordinator_url}/multimodal/agents/{agent_id}/process",
headers={"X-Api-Key": config.api_key or ""},
json=process_data
)
@@ -176,7 +176,7 @@ def benchmark(ctx, agent_id: str, dataset: str, metrics: str, iterations: int):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/multimodal/agents/{agent_id}/benchmark",
f"{config.coordinator_url}/multimodal/agents/{agent_id}/benchmark",
headers={"X-Api-Key": config.api_key or ""},
json=benchmark_data
)
@@ -213,7 +213,7 @@ def optimize(ctx, agent_id: str, objective: str, target: Optional[str]):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/multimodal/agents/{agent_id}/optimize",
f"{config.coordinator_url}/multimodal/agents/{agent_id}/optimize",
headers={"X-Api-Key": config.api_key or ""},
json=optimization_data
)
@@ -274,7 +274,7 @@ def convert(ctx, input_path: str, output_format: str, model: str, output_file: O
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/multimodal/convert",
f"{config.coordinator_url}/multimodal/convert",
headers={"X-Api-Key": config.api_key or ""},
json=conversion_data
)
@@ -329,7 +329,7 @@ def search(ctx, query: str, modalities: str, limit: int, threshold: float):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/multimodal/search",
f"{config.coordinator_url}/multimodal/search",
headers={"X-Api-Key": config.api_key or ""},
json=search_data
)
@@ -378,7 +378,7 @@ def attention(ctx, agent_id: str, inputs, visualize: bool, output: Optional[str]
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/multimodal/agents/{agent_id}/attention",
f"{config.coordinator_url}/multimodal/agents/{agent_id}/attention",
headers={"X-Api-Key": config.api_key or ""},
json=attention_data
)
@@ -414,7 +414,7 @@ def capabilities(ctx, agent_id: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/multimodal/agents/{agent_id}/capabilities",
f"{config.coordinator_url}/multimodal/agents/{agent_id}/capabilities",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -451,7 +451,7 @@ def test(ctx, agent_id: str, modality: str, test_data):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/multimodal/agents/{agent_id}/test/{modality}",
f"{config.coordinator_url}/multimodal/agents/{agent_id}/test/{modality}",
headers={"X-Api-Key": config.api_key or ""},
json=test_input
)

View File

@@ -0,0 +1,439 @@
"""Node management commands for AITBC CLI"""
import click
from typing import Optional
from ..core.config import MultiChainConfig, load_multichain_config, get_default_node_config, add_node_config, remove_node_config
from ..core.node_client import NodeClient
from ..utils import output, error, success
@click.group()
def node():
    """Node management commands"""
@node.command()
@click.argument('node_id')
@click.pass_context
def info(ctx, node_id):
    """Get detailed node information"""
    try:
        config = load_multichain_config()
        # The node must already be present in the local configuration.
        if node_id not in config.nodes:
            error(f"Node {node_id} not found in configuration")
            raise click.Abort()
        node_config = config.nodes[node_id]
        import asyncio

        async def _fetch():
            # Query the node over its client connection.
            async with NodeClient(node_config) as client:
                return await client.get_node_info()

        details = asyncio.run(_fetch())
        fmt = ctx.obj.get('output_format', 'table')
        # Basic identity and status summary.
        output({
            "Node ID": details["node_id"],
            "Node Type": details["type"],
            "Status": details["status"],
            "Version": details["version"],
            "Uptime": f"{details['uptime_days']} days, {details['uptime_hours']} hours",
            "Endpoint": node_config.endpoint,
        }, fmt, title=f"Node Information: {node_id}")
        # Resource-usage metrics.
        output({
            "CPU Usage": f"{details['cpu_usage']}%",
            "Memory Usage": f"{details['memory_usage_mb']:.1f}MB",
            "Disk Usage": f"{details['disk_usage_mb']:.1f}MB",
            "Network In": f"{details['network_in_mb']:.1f}MB/s",
            "Network Out": f"{details['network_out_mb']:.1f}MB/s",
        }, fmt, title="Performance Metrics")
        # Chains hosted on this node, when any were reported.
        hosted = details.get("hosted_chains")
        if hosted:
            output([
                {
                    "Chain ID": cid,
                    "Type": chain.get("type", "unknown"),
                    "Status": chain.get("status", "unknown"),
                }
                for cid, chain in hosted.items()
            ], fmt, title="Hosted Chains")
    except Exception as e:
        error(f"Error getting node info: {str(e)}")
        raise click.Abort()
@node.command()
@click.option('--show-private', is_flag=True, help='Show private chains')
@click.option('--node-id', help='Specific node ID to query')
@click.pass_context
def chains(ctx, show_private, node_id):
    """List chains hosted on all nodes"""
    try:
        config = load_multichain_config()
        all_chains = []
        import asyncio

        async def get_all_chains():
            tasks = []

            async def get_chains_for_node(nid, nconfig):
                # Best effort: a node that fails to answer is reported and
                # skipped rather than aborting the whole listing.
                try:
                    async with NodeClient(nconfig) as client:
                        chains = await client.get_hosted_chains()
                        return [(nid, chain) for chain in chains]
                except Exception as e:
                    print(f"Error getting chains from node {nid}: {e}")
                    return []

            for nid, node_config in config.nodes.items():
                if node_id and nid != node_id:
                    continue
                # BUG FIX: pass the loop's node id (nid), not the --node-id
                # option (node_id), which is None when no filter is given and
                # previously mislabeled (or broke) every queried node.
                tasks.append(get_chains_for_node(nid, node_config))
            results = await asyncio.gather(*tasks)
            for result in results:
                all_chains.extend(result)

        asyncio.run(get_all_chains())
        if not all_chains:
            output("No chains found on any node", ctx.obj.get('output_format', 'table'))
            return
        # Filter private chains if not requested. Use a distinct name (nid)
        # so the comprehension does not shadow the --node-id option.
        if not show_private:
            all_chains = [(nid, chain) for nid, chain in all_chains
                          if chain.privacy.visibility != "private"]
        # One display row per (node, chain) pair.
        chains_data = [
            {
                "Node ID": nid,
                "Chain ID": chain.id,
                "Type": chain.type.value,
                "Purpose": chain.purpose,
                "Name": chain.name,
                "Status": chain.status.value,
                "Block Height": chain.block_height,
                "Size": f"{chain.size_mb:.1f}MB"
            }
            for nid, chain in all_chains
        ]
        output(chains_data, ctx.obj.get('output_format', 'table'), title="Chains by Node")
    except Exception as e:
        error(f"Error listing chains: {str(e)}")
        raise click.Abort()
@node.command()
@click.option('--format', type=click.Choice(['table', 'json']), default='table', help='Output format')
@click.pass_context
def list(ctx, format):
    """List all configured nodes"""
    # NOTE(review): the --format option is accepted but the rendering format
    # actually used is ctx.obj['output_format'] — confirm whether the option
    # is meant to override it.
    try:
        config = load_multichain_config()
        out_fmt = ctx.obj.get('output_format', 'table')
        if not config.nodes:
            output("No nodes configured", out_fmt)
            return
        rows = []
        for nid, ncfg in config.nodes.items():
            rows.append({
                "Node ID": nid,
                "Endpoint": ncfg.endpoint,
                "Timeout": f"{ncfg.timeout}s",
                "Max Connections": ncfg.max_connections,
                "Retry Count": ncfg.retry_count,
            })
        output(rows, out_fmt, title="Configured Nodes")
    except Exception as e:
        error(f"Error listing nodes: {str(e)}")
        raise click.Abort()
@node.command()
@click.argument('node_id')
@click.argument('endpoint')
@click.option('--timeout', default=30, help='Request timeout in seconds')
@click.option('--max-connections', default=10, help='Maximum concurrent connections')
@click.option('--retry-count', default=3, help='Number of retry attempts')
@click.pass_context
def add(ctx, node_id, endpoint, timeout, max_connections, retry_count):
    """Add a new node to configuration"""
    try:
        config = load_multichain_config()
        # Refuse to silently overwrite an existing entry.
        if node_id in config.nodes:
            error(f"Node {node_id} already exists")
            raise click.Abort()
        # Start from the defaults, then overlay the CLI-supplied values.
        node_config = get_default_node_config()
        node_config.id = node_id
        node_config.endpoint = endpoint
        node_config.timeout = timeout
        node_config.max_connections = max_connections
        node_config.retry_count = retry_count
        config = add_node_config(config, node_config)
        from ..core.config import save_multichain_config
        save_multichain_config(config)
        success(f"Node {node_id} added successfully!")
        # Echo back what was persisted so the user can verify it.
        summary = {
            "Node ID": node_id,
            "Endpoint": endpoint,
            "Timeout": f"{timeout}s",
            "Max Connections": max_connections,
            "Retry Count": retry_count,
        }
        output(summary, ctx.obj.get('output_format', 'table'))
    except Exception as e:
        error(f"Error adding node: {str(e)}")
        raise click.Abort()
@node.command()
@click.argument('node_id')
@click.option('--force', is_flag=True, help='Force removal without confirmation')
@click.pass_context
def remove(ctx, node_id, force):
    """Remove a node from configuration"""
    try:
        config = load_multichain_config()
        if node_id not in config.nodes:
            error(f"Node {node_id} not found")
            raise click.Abort()
        if not force:
            # Show node information before removal
            ncfg = config.nodes[node_id]
            preview = {
                "Node ID": node_id,
                "Endpoint": ncfg.endpoint,
                "Timeout": f"{ncfg.timeout}s",
                "Max Connections": ncfg.max_connections,
            }
            output(preview, ctx.obj.get('output_format', 'table'), title="Node to Remove")
            # Interactive confirmation; declining aborts without changes.
            confirmed = click.confirm(f"Are you sure you want to remove node {node_id}?")
            if not confirmed:
                raise click.Abort()
        config = remove_node_config(config, node_id)
        from ..core.config import save_multichain_config
        save_multichain_config(config)
        success(f"Node {node_id} removed successfully!")
    except Exception as e:
        error(f"Error removing node: {str(e)}")
        raise click.Abort()
@node.command()
@click.argument('node_id')
@click.option('--realtime', is_flag=True, help='Real-time monitoring')
@click.option('--interval', default=5, help='Update interval in seconds')
@click.pass_context
def monitor(ctx, node_id, realtime, interval):
    """Monitor node activity.

    Without --realtime, prints a single snapshot of the node's metrics.
    With --realtime, renders a rich Live layout that is rebuilt every
    `interval` seconds until interrupted with Ctrl-C.
    """
    try:
        config = load_multichain_config()
        if node_id not in config.nodes:
            error(f"Node {node_id} not found")
            raise click.Abort()
        node_config = config.nodes[node_id]
        # Local imports keep rich/asyncio out of module import time.
        import asyncio
        from rich.console import Console
        from rich.layout import Layout
        from rich.live import Live
        import time
        console = Console()
        async def get_node_stats():
            # One short-lived client per poll; returns the raw info dict.
            async with NodeClient(node_config) as client:
                node_info = await client.get_node_info()
                return node_info
        if realtime:
            # Real-time monitoring
            def generate_monitor_layout():
                # Builds a fresh Layout per refresh; on failure returns a
                # plain error string, which Live can also render.
                # NOTE(review): asyncio.run() opens a new event loop on
                # every refresh — confirm this is acceptable at the chosen
                # interval.
                try:
                    node_info = asyncio.run(get_node_stats())
                    layout = Layout()
                    layout.split_column(
                        Layout(name="header", size=3),
                        Layout(name="metrics"),
                        Layout(name="chains", size=10)
                    )
                    # Header
                    layout["header"].update(
                        f"Node Monitor: {node_id} - {node_info['status'].upper()}"
                    )
                    # Metrics table
                    metrics_data = [
                        ["CPU Usage", f"{node_info['cpu_usage']}%"],
                        ["Memory Usage", f"{node_info['memory_usage_mb']:.1f}MB"],
                        ["Disk Usage", f"{node_info['disk_usage_mb']:.1f}MB"],
                        ["Network In", f"{node_info['network_in_mb']:.1f}MB/s"],
                        ["Network Out", f"{node_info['network_out_mb']:.1f}MB/s"],
                        ["Uptime", f"{node_info['uptime_days']}d {node_info['uptime_hours']}h"]
                    ]
                    # Rendered as the list's repr, not a rich Table.
                    layout["metrics"].update(str(metrics_data))
                    # Chains info
                    if node_info.get("hosted_chains"):
                        chains_text = f"Hosted Chains: {len(node_info['hosted_chains'])}\n"
                        # Show at most the first five hosted chains.
                        for chain_id, chain in list(node_info["hosted_chains"].items())[:5]:
                            chains_text += f"{chain_id} ({chain.get('status', 'unknown')})\n"
                        layout["chains"].update(chains_text)
                    else:
                        layout["chains"].update("No chains hosted")
                    return layout
                except Exception as e:
                    return f"Error getting node stats: {e}"
            # Live redraws at 1 fps; content is refetched every `interval`s.
            with Live(generate_monitor_layout(), refresh_per_second=1) as live:
                try:
                    while True:
                        live.update(generate_monitor_layout())
                        time.sleep(interval)
                except KeyboardInterrupt:
                    # Ctrl-C exits the monitoring loop cleanly.
                    console.print("\n[yellow]Monitoring stopped by user[/yellow]")
        else:
            # Single snapshot
            node_info = asyncio.run(get_node_stats())
            stats_data = [
                {
                    "Metric": "CPU Usage",
                    "Value": f"{node_info['cpu_usage']}%"
                },
                {
                    "Metric": "Memory Usage",
                    "Value": f"{node_info['memory_usage_mb']:.1f}MB"
                },
                {
                    "Metric": "Disk Usage",
                    "Value": f"{node_info['disk_usage_mb']:.1f}MB"
                },
                {
                    "Metric": "Network In",
                    "Value": f"{node_info['network_in_mb']:.1f}MB/s"
                },
                {
                    "Metric": "Network Out",
                    "Value": f"{node_info['network_out_mb']:.1f}MB/s"
                },
                {
                    "Metric": "Uptime",
                    "Value": f"{node_info['uptime_days']}d {node_info['uptime_hours']}h"
                }
            ]
            output(stats_data, ctx.obj.get('output_format', 'table'), title=f"Node Statistics: {node_id}")
    except Exception as e:
        error(f"Error during monitoring: {str(e)}")
        raise click.Abort()
@node.command()
@click.argument('node_id')
@click.pass_context
def test(ctx, node_id):
    """Test connectivity to a node"""
    try:
        config = load_multichain_config()
        if node_id not in config.nodes:
            error(f"Node {node_id} not found")
            raise click.Abort()
        node_config = config.nodes[node_id]
        import asyncio
        async def test_node():
            # Probe the node: fetch its info and hosted chains in one session.
            # Any failure is folded into a {"connected": False, ...} result.
            try:
                async with NodeClient(node_config) as client:
                    node_info = await client.get_node_info()
                    chains = await client.get_hosted_chains()
            except Exception as e:
                return {
                    "connected": False,
                    "error": str(e)
                }
            return {
                "connected": True,
                "node_id": node_info["node_id"],
                "status": node_info["status"],
                "version": node_info["version"],
                "chains_count": len(chains)
            }
        result = asyncio.run(test_node())
        if not result["connected"]:
            error(f"Failed to connect to node {node_id}: {result['error']}")
            raise click.Abort()
        success(f"Successfully connected to node {node_id}!")
        # One row per probe aspect, rendered as a small results table.
        checks = [
            ("Connection", "✓ Pass"),
            ("Node ID", result["node_id"]),
            ("Status", result["status"]),
            ("Version", result["version"]),
            ("Chains", f"{result['chains_count']} hosted"),
        ]
        test_data = [{"Test": label, "Status": value} for label, value in checks]
        output(test_data, ctx.obj.get('output_format', 'table'), title=f"Node Test Results: {node_id}")
    except Exception as e:
        error(f"Error testing node: {str(e)}")
        raise click.Abort()

View File

@@ -31,8 +31,8 @@ openclaw.add_command(deploy)
@click.option("--edge-locations", help="Comma-separated edge locations")
@click.option("--auto-scale", is_flag=True, help="Enable auto-scaling")
@click.pass_context
def deploy(ctx, agent_id: str, region: str, instances: int, instance_type: str,
edge_locations: Optional[str], auto_scale: bool):
def deploy_agent(ctx, agent_id: str, region: str, instances: int, instance_type: str,
edge_locations: Optional[str], auto_scale: bool):
"""Deploy agent to OpenClaw network"""
config = ctx.obj['config']
@@ -50,7 +50,7 @@ def deploy(ctx, agent_id: str, region: str, instances: int, instance_type: str,
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/openclaw/deploy",
f"{config.coordinator_url}/openclaw/deploy",
headers={"X-Api-Key": config.api_key or ""},
json=deployment_data
)
@@ -69,7 +69,6 @@ def deploy(ctx, agent_id: str, region: str, instances: int, instance_type: str,
ctx.exit(1)
@deploy.command()
@click.argument("deployment_id")
@click.option("--instances", required=True, type=int, help="New number of instances")
@click.option("--auto-scale", is_flag=True, help="Enable auto-scaling")
@@ -90,7 +89,7 @@ def scale(ctx, deployment_id: str, instances: int, auto_scale: bool, min_instanc
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/openclaw/deployments/{deployment_id}/scale",
f"{config.coordinator_url}/openclaw/deployments/{deployment_id}/scale",
headers={"X-Api-Key": config.api_key or ""},
json=scale_data
)
@@ -124,7 +123,7 @@ def optimize(ctx, deployment_id: str, objective: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/openclaw/deployments/{deployment_id}/optimize",
f"{config.coordinator_url}/openclaw/deployments/{deployment_id}/optimize",
headers={"X-Api-Key": config.api_key or ""},
json=optimization_data
)
@@ -168,7 +167,7 @@ def monitor(ctx, deployment_id: str, metrics: str, real_time: bool, interval: in
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/openclaw/deployments/{deployment_id}/metrics",
f"{config.coordinator_url}/openclaw/deployments/{deployment_id}/metrics",
headers={"X-Api-Key": config.api_key or ""},
params=params
)
@@ -218,7 +217,7 @@ def status(ctx, deployment_id: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/openclaw/deployments/{deployment_id}/status",
f"{config.coordinator_url}/openclaw/deployments/{deployment_id}/status",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -264,7 +263,7 @@ def deploy(ctx, agent_id: str, locations: str, strategy: str, replicas: int):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/openclaw/edge/deploy",
f"{config.coordinator_url}/openclaw/edge/deploy",
headers={"X-Api-Key": config.api_key or ""},
json=edge_data
)
@@ -297,7 +296,7 @@ def resources(ctx, location: Optional[str]):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/openclaw/edge/resources",
f"{config.coordinator_url}/openclaw/edge/resources",
headers={"X-Api-Key": config.api_key or ""},
params=params
)
@@ -335,7 +334,7 @@ def optimize(ctx, deployment_id: str, latency_target: Optional[int],
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/openclaw/edge/deployments/{deployment_id}/optimize",
f"{config.coordinator_url}/openclaw/edge/deployments/{deployment_id}/optimize",
headers={"X-Api-Key": config.api_key or ""},
json=optimization_data
)
@@ -369,7 +368,7 @@ def compliance(ctx, deployment_id: str, standards: Optional[str]):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/openclaw/edge/deployments/{deployment_id}/compliance",
f"{config.coordinator_url}/openclaw/edge/deployments/{deployment_id}/compliance",
headers={"X-Api-Key": config.api_key or ""},
params=params
)
@@ -412,7 +411,7 @@ def optimize(ctx, deployment_id: str, algorithm: str, weights: Optional[str]):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/openclaw/routing/deployments/{deployment_id}/optimize",
f"{config.coordinator_url}/openclaw/routing/deployments/{deployment_id}/optimize",
headers={"X-Api-Key": config.api_key or ""},
json=routing_data
)
@@ -441,7 +440,7 @@ def status(ctx, deployment_id: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/openclaw/routing/deployments/{deployment_id}/status",
f"{config.coordinator_url}/openclaw/routing/deployments/{deployment_id}/status",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -490,7 +489,7 @@ def create(ctx, name: str, type: str, description: str, package):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/openclaw/ecosystem/solutions",
f"{config.coordinator_url}/openclaw/ecosystem/solutions",
headers={"X-Api-Key": config.api_key or ""},
data=solution_data,
files=files
@@ -528,7 +527,7 @@ def list(ctx, type: Optional[str], category: Optional[str], limit: int):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/openclaw/ecosystem/solutions",
f"{config.coordinator_url}/openclaw/ecosystem/solutions",
headers={"X-Api-Key": config.api_key or ""},
params=params
)
@@ -554,7 +553,7 @@ def install(ctx, solution_id: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/openclaw/ecosystem/solutions/{solution_id}/install",
f"{config.coordinator_url}/openclaw/ecosystem/solutions/{solution_id}/install",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -586,7 +585,7 @@ def terminate(ctx, deployment_id: str):
try:
with httpx.Client() as client:
response = client.delete(
f"{config.coordinator_url}/v1/openclaw/deployments/{deployment_id}",
f"{config.coordinator_url}/openclaw/deployments/{deployment_id}",
headers={"X-Api-Key": config.api_key or ""}
)

View File

@@ -48,7 +48,7 @@ def enable(ctx, agent_id: str, mode: str, scope: str, aggressiveness: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/optimize/agents/{agent_id}/enable",
f"{config.coordinator_url}/optimize/agents/{agent_id}/enable",
headers={"X-Api-Key": config.api_key or ""},
json=optimization_config
)
@@ -83,7 +83,7 @@ def status(ctx, agent_id: str, metrics: str, real_time: bool, interval: int):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/optimize/agents/{agent_id}/status",
f"{config.coordinator_url}/optimize/agents/{agent_id}/status",
headers={"X-Api-Key": config.api_key or ""},
params=params
)
@@ -151,7 +151,7 @@ def objectives(ctx, agent_id: str, targets: str, priority: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/optimize/agents/{agent_id}/objectives",
f"{config.coordinator_url}/optimize/agents/{agent_id}/objectives",
headers={"X-Api-Key": config.api_key or ""},
json=objectives_data
)
@@ -190,7 +190,7 @@ def recommendations(ctx, agent_id: str, priority: str, category: Optional[str]):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/optimize/agents/{agent_id}/recommendations",
f"{config.coordinator_url}/optimize/agents/{agent_id}/recommendations",
headers={"X-Api-Key": config.api_key or ""},
params=params
)
@@ -223,7 +223,7 @@ def apply(ctx, agent_id: str, recommendation_id: str, confirm: bool):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/optimize/agents/{agent_id}/apply/{recommendation_id}",
f"{config.coordinator_url}/optimize/agents/{agent_id}/apply/{recommendation_id}",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -249,7 +249,6 @@ def predict():
optimize.add_command(predict)
@predict.command()
@click.argument("agent_id")
@click.option("--horizon", default=24, help="Prediction horizon in hours")
@@ -269,7 +268,7 @@ def predict(ctx, agent_id: str, horizon: int, resources: str, confidence: float)
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/predict/agents/{agent_id}/resources",
f"{config.coordinator_url}/predict/agents/{agent_id}/resources",
headers={"X-Api-Key": config.api_key or ""},
json=prediction_data
)
@@ -288,7 +287,6 @@ def predict(ctx, agent_id: str, horizon: int, resources: str, confidence: float)
ctx.exit(1)
@predict.command()
@click.argument("agent_id")
@click.option("--policy", default="cost-efficiency",
type=click.Choice(["cost-efficiency", "performance", "availability", "hybrid"]),
@@ -311,7 +309,7 @@ def autoscale(ctx, agent_id: str, policy: str, min_instances: int, max_instances
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/predict/agents/{agent_id}/autoscale",
f"{config.coordinator_url}/predict/agents/{agent_id}/autoscale",
headers={"X-Api-Key": config.api_key or ""},
json=autoscale_config
)
@@ -330,7 +328,6 @@ def autoscale(ctx, agent_id: str, policy: str, min_instances: int, max_instances
ctx.exit(1)
@predict.command()
@click.argument("agent_id")
@click.option("--metric", required=True, help="Metric to forecast (throughput, latency, cost, etc.)")
@click.option("--period", default=7, help="Forecast period in days")
@@ -351,7 +348,7 @@ def forecast(ctx, agent_id: str, metric: str, period: int, granularity: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/predict/agents/{agent_id}/forecast",
f"{config.coordinator_url}/predict/agents/{agent_id}/forecast",
headers={"X-Api-Key": config.api_key or ""},
json=forecast_params
)
@@ -400,7 +397,7 @@ def auto(ctx, agent_id: str, parameters: Optional[str], objective: str, iteratio
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/tune/agents/{agent_id}/auto",
f"{config.coordinator_url}/tune/agents/{agent_id}/auto",
headers={"X-Api-Key": config.api_key or ""},
json=tuning_data
)
@@ -431,7 +428,7 @@ def status(ctx, tuning_id: str, watch: bool):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/tune/sessions/{tuning_id}",
f"{config.coordinator_url}/tune/sessions/{tuning_id}",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -475,7 +472,7 @@ def results(ctx, tuning_id: str):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/tune/sessions/{tuning_id}/results",
f"{config.coordinator_url}/tune/sessions/{tuning_id}/results",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -500,7 +497,7 @@ def disable(ctx, agent_id: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/optimize/agents/{agent_id}/disable",
f"{config.coordinator_url}/optimize/agents/{agent_id}/disable",
headers={"X-Api-Key": config.api_key or ""}
)

View File

@@ -39,7 +39,7 @@ def join(ctx, role: str, capability: str, region: Optional[str], priority: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/swarm/join",
f"{config.coordinator_url}/swarm/join",
headers={"X-Api-Key": config.api_key or ""},
json=swarm_data
)
@@ -80,7 +80,7 @@ def coordinate(ctx, task: str, collaborators: int, strategy: str, timeout: int):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/swarm/coordinate",
f"{config.coordinator_url}/swarm/coordinate",
headers={"X-Api-Key": config.api_key or ""},
json=coordination_data
)
@@ -117,7 +117,7 @@ def list(ctx, swarm_id: Optional[str], status: Optional[str], limit: int):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/swarm/list",
f"{config.coordinator_url}/swarm/list",
headers={"X-Api-Key": config.api_key or ""},
params=params
)
@@ -146,7 +146,7 @@ def status(ctx, task_id: str, real_time: bool, interval: int):
try:
with httpx.Client() as client:
response = client.get(
f"{config.coordinator_url}/v1/swarm/tasks/{task_id}/status",
f"{config.coordinator_url}/swarm/tasks/{task_id}/status",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -194,7 +194,7 @@ def leave(ctx, swarm_id: str):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/swarm/{swarm_id}/leave",
f"{config.coordinator_url}/swarm/{swarm_id}/leave",
headers={"X-Api-Key": config.api_key or ""}
)
@@ -227,7 +227,7 @@ def consensus(ctx, task_id: str, consensus_threshold: float):
try:
with httpx.Client() as client:
response = client.post(
f"{config.coordinator_url}/v1/swarm/tasks/{task_id}/consensus",
f"{config.coordinator_url}/swarm/tasks/{task_id}/consensus",
headers={"X-Api-Key": config.api_key or ""},
json=consensus_data
)