network: add hub registration, Redis persistence, and federated mesh join protocol
Some checks failed
Systemd Sync / sync-systemd (push) Waiting to run
CLI Tests / test-cli (push) Has been cancelled
Integration Tests / test-service-integration (push) Has been cancelled
Python Tests / test-python (push) Has been cancelled
Security Scanning / security-scan (push) Has been cancelled
Documentation Validation / validate-docs (push) Has been cancelled
API Endpoint Tests / test-api-endpoints (push) Has been cancelled
Some checks failed
Systemd Sync / sync-systemd (push) Waiting to run
CLI Tests / test-cli (push) Has been cancelled
Integration Tests / test-service-integration (push) Has been cancelled
Python Tests / test-python (push) Has been cancelled
Security Scanning / security-scan (push) Has been cancelled
Documentation Validation / validate-docs (push) Has been cancelled
API Endpoint Tests / test-api-endpoints (push) Has been cancelled
- Change default P2P port from 7070 to 8001 in config and .env.example - Add redis_url configuration option for hub persistence (default: redis://localhost:6379) - Implement DNS-based hub registration/unregistration via HTTPS API endpoints - Add Redis persistence for hub registrations with 1-hour TTL - Add island join request/response protocol with member list and blockchain credentials - Add GPU marketplace tracking (offers, bids, providers) in hub manager - Add
This commit is contained in:
@@ -7,8 +7,9 @@ supported_chains=ait-devnet
|
||||
rpc_bind_host=0.0.0.0
|
||||
rpc_bind_port=8006
|
||||
|
||||
# Network
|
||||
p2p_bind_host=0.0.0.0
|
||||
p2p_bind_port=7070
|
||||
p2p_bind_port=8001
|
||||
|
||||
proposer_id=aitbc1-proposer
|
||||
|
||||
|
||||
@@ -26,11 +26,11 @@ class ChainSettings(BaseSettings):
|
||||
supported_chains: str = "ait-devnet" # Comma-separated list of supported chain IDs
|
||||
db_path: Path = Path("/var/lib/aitbc/data/chain.db")
|
||||
|
||||
rpc_bind_host: str = "127.0.0.1"
|
||||
rpc_bind_host: str = "0.0.0.0"
|
||||
rpc_bind_port: int = 8080
|
||||
|
||||
p2p_bind_host: str = "127.0.0.2"
|
||||
p2p_bind_port: int = 7070
|
||||
p2p_bind_host: str = "0.0.0.0"
|
||||
p2p_bind_port: int = 8001
|
||||
|
||||
proposer_id: str = ""
|
||||
proposer_key: Optional[str] = None
|
||||
@@ -85,6 +85,9 @@ class ChainSettings(BaseSettings):
|
||||
hub_discovery_url: str = "hub.aitbc.bubuit.net" # Hub discovery DNS
|
||||
bridge_islands: str = "" # Comma-separated list of islands to bridge (optional)
|
||||
|
||||
# Redis Configuration (Hub persistence)
|
||||
redis_url: str = "redis://localhost:6379" # Redis connection URL
|
||||
|
||||
# Keystore for proposer private key (future block signing)
|
||||
keystore_path: Path = Path("/var/lib/aitbc/keystore")
|
||||
keystore_password_file: Path = Path("/var/lib/aitbc/keystore/.password")
|
||||
|
||||
@@ -75,9 +75,9 @@ class P2PDiscovery:
|
||||
"""Add bootstrap node for initial connection"""
|
||||
self.bootstrap_nodes.append((address, port))
|
||||
|
||||
def generate_node_id(self, address: str, port: int, public_key: str) -> str:
|
||||
"""Generate unique node ID from address, port, and public key"""
|
||||
content = f"{address}:{port}:{public_key}"
|
||||
def generate_node_id(self, hostname: str, address: str, port: int, public_key: str) -> str:
|
||||
"""Generate unique node ID from hostname, address, port, and public key"""
|
||||
content = f"{hostname}:{address}:{port}:{public_key}"
|
||||
return hashlib.sha256(content.encode()).hexdigest()
|
||||
|
||||
async def start_discovery(self):
|
||||
|
||||
@@ -6,7 +6,8 @@ DNS-based hub discovery for federated mesh with hardcoded fallback
|
||||
import asyncio
|
||||
import logging
|
||||
import socket
|
||||
from typing import List, Optional, Tuple
|
||||
import json
|
||||
from typing import List, Optional, Tuple, Dict
|
||||
from dataclasses import dataclass
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -102,16 +103,73 @@ class HubDiscovery:
|
||||
for address, port in self.FALLBACK_HUBS
|
||||
]
|
||||
|
||||
async def register_hub(self, hub_address: str, hub_port: int, discovery_url: Optional[str] = None) -> bool:
|
||||
async def register_hub(self, hub_info: Dict, discovery_url: Optional[str] = None) -> bool:
|
||||
"""
|
||||
Register this node as a hub (placeholder for future DNS registration)
|
||||
|
||||
Note: This is a placeholder for future DNS registration functionality.
|
||||
Currently, hub registration is done via manual DNS configuration.
|
||||
Register this node as a hub with DNS discovery service
|
||||
|
||||
Args:
|
||||
hub_info: Dictionary containing hub information (node_id, address, port, island_id, island_name, public_address, public_port, public_key_pem)
|
||||
discovery_url: Optional custom discovery URL (uses default if not provided)
|
||||
|
||||
Returns:
|
||||
bool: True if registration successful, False otherwise
|
||||
"""
|
||||
logger.info(f"Hub registration placeholder: {hub_address}:{hub_port}")
|
||||
# Future: Implement dynamic DNS registration
|
||||
return True
|
||||
url = discovery_url or self.discovery_url
|
||||
registration_url = f"https://{url}/api/register"
|
||||
|
||||
try:
|
||||
import httpx
|
||||
|
||||
async with httpx.AsyncClient(timeout=10.0) as client:
|
||||
response = await client.post(registration_url, json=hub_info)
|
||||
|
||||
if response.status_code == 200:
|
||||
logger.info(f"Successfully registered hub {hub_info.get('node_id')} with DNS discovery service")
|
||||
return True
|
||||
else:
|
||||
logger.error(f"DNS registration failed: {response.status_code} - {response.text}")
|
||||
return False
|
||||
|
||||
except httpx.RequestError as e:
|
||||
logger.error(f"DNS registration request failed: {e}")
|
||||
return False
|
||||
except Exception as e:
|
||||
logger.error(f"DNS registration error: {e}")
|
||||
return False
|
||||
|
||||
async def unregister_hub(self, node_id: str, discovery_url: Optional[str] = None) -> bool:
|
||||
"""
|
||||
Unregister this node as a hub from DNS discovery service
|
||||
|
||||
Args:
|
||||
node_id: Node ID to unregister
|
||||
discovery_url: Optional custom discovery URL (uses default if not provided)
|
||||
|
||||
Returns:
|
||||
bool: True if unregistration successful, False otherwise
|
||||
"""
|
||||
url = discovery_url or self.discovery_url
|
||||
unregistration_url = f"https://{url}/api/unregister"
|
||||
|
||||
try:
|
||||
import httpx
|
||||
|
||||
async with httpx.AsyncClient(timeout=10.0) as client:
|
||||
response = await client.post(unregistration_url, json={"node_id": node_id})
|
||||
|
||||
if response.status_code == 200:
|
||||
logger.info(f"Successfully unregistered hub {node_id} from DNS discovery service")
|
||||
return True
|
||||
else:
|
||||
logger.error(f"DNS unregistration failed: {response.status_code} - {response.text}")
|
||||
return False
|
||||
|
||||
except httpx.RequestError as e:
|
||||
logger.error(f"DNS unregistration request failed: {e}")
|
||||
return False
|
||||
except Exception as e:
|
||||
logger.error(f"DNS unregistration error: {e}")
|
||||
return False
|
||||
|
||||
def clear_cache(self):
|
||||
"""Clear cached hub list"""
|
||||
|
||||
@@ -6,8 +6,10 @@ Manages hub operations, peer list sharing, and hub registration for federated me
|
||||
import asyncio
|
||||
import logging
|
||||
import time
|
||||
import json
|
||||
import os
|
||||
from typing import Dict, List, Optional, Set
|
||||
from dataclasses import dataclass, field
|
||||
from dataclasses import dataclass, field, asdict
|
||||
from enum import Enum
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -50,45 +52,319 @@ class PeerInfo:
|
||||
|
||||
class HubManager:
|
||||
"""Manages hub operations for federated mesh"""
|
||||
|
||||
def __init__(self, local_node_id: str, local_address: str, local_port: int, island_id: str, island_name: str):
|
||||
|
||||
def __init__(self, local_node_id: str, local_address: str, local_port: int, island_id: str, island_name: str, redis_url: Optional[str] = None):
|
||||
self.local_node_id = local_node_id
|
||||
self.local_address = local_address
|
||||
self.local_port = local_port
|
||||
self.island_id = island_id
|
||||
self.island_name = island_name
|
||||
|
||||
self.redis_url = redis_url or "redis://localhost:6379"
|
||||
|
||||
# Hub registration status
|
||||
self.is_hub = False
|
||||
self.hub_status = HubStatus.UNREGISTERED
|
||||
self.registered_at: Optional[float] = None
|
||||
|
||||
|
||||
# Known hubs
|
||||
self.known_hubs: Dict[str, HubInfo] = {} # node_id -> HubInfo
|
||||
|
||||
|
||||
# Peer registry (for providing peer lists)
|
||||
self.peer_registry: Dict[str, PeerInfo] = {} # node_id -> PeerInfo
|
||||
|
||||
|
||||
# Island peers (island_id -> set of node_ids)
|
||||
self.island_peers: Dict[str, Set[str]] = {}
|
||||
|
||||
|
||||
self.running = False
|
||||
|
||||
self._redis = None
|
||||
|
||||
# Initialize island peers for our island
|
||||
self.island_peers[self.island_id] = set()
|
||||
|
||||
def register_as_hub(self, public_address: Optional[str] = None, public_port: Optional[int] = None) -> bool:
|
||||
async def _connect_redis(self):
|
||||
"""Connect to Redis"""
|
||||
try:
|
||||
import redis.asyncio as redis
|
||||
self._redis = redis.from_url(self.redis_url)
|
||||
await self._redis.ping()
|
||||
logger.info(f"Connected to Redis for hub persistence: {self.redis_url}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to connect to Redis: {e}")
|
||||
return False
|
||||
|
||||
async def _persist_hub_registration(self, hub_info: HubInfo) -> bool:
|
||||
"""Persist hub registration to Redis"""
|
||||
try:
|
||||
if not self._redis:
|
||||
await self._connect_redis()
|
||||
|
||||
if not self._redis:
|
||||
logger.warning("Redis not available, skipping persistence")
|
||||
return False
|
||||
|
||||
key = f"hub:{hub_info.node_id}"
|
||||
value = json.dumps(asdict(hub_info), default=str)
|
||||
await self._redis.setex(key, 3600, value) # TTL: 1 hour
|
||||
logger.info(f"Persisted hub registration to Redis: {key}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to persist hub registration: {e}")
|
||||
return False
|
||||
|
||||
async def _remove_hub_registration(self, node_id: str) -> bool:
|
||||
"""Remove hub registration from Redis"""
|
||||
try:
|
||||
if not self._redis:
|
||||
await self._connect_redis()
|
||||
|
||||
if not self._redis:
|
||||
logger.warning("Redis not available, skipping removal")
|
||||
return False
|
||||
|
||||
key = f"hub:{node_id}"
|
||||
await self._redis.delete(key)
|
||||
logger.info(f"Removed hub registration from Redis: {key}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to remove hub registration: {e}")
|
||||
return False
|
||||
|
||||
async def _load_hub_registration(self) -> Optional[HubInfo]:
|
||||
"""Load hub registration from Redis"""
|
||||
try:
|
||||
if not self._redis:
|
||||
await self._connect_redis()
|
||||
|
||||
if not self._redis:
|
||||
return None
|
||||
|
||||
key = f"hub:{self.local_node_id}"
|
||||
value = await self._redis.get(key)
|
||||
if value:
|
||||
data = json.loads(value)
|
||||
return HubInfo(**data)
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to load hub registration: {e}")
|
||||
return None
|
||||
|
||||
def _get_blockchain_credentials(self) -> dict:
|
||||
"""Get blockchain credentials from keystore"""
|
||||
try:
|
||||
credentials = {}
|
||||
|
||||
# Get genesis block hash from genesis.json
|
||||
genesis_path = '/var/lib/aitbc/data/ait-mainnet/genesis.json'
|
||||
if os.path.exists(genesis_path):
|
||||
with open(genesis_path, 'r') as f:
|
||||
genesis_data = json.load(f)
|
||||
# Get genesis block hash
|
||||
if 'blocks' in genesis_data and len(genesis_data['blocks']) > 0:
|
||||
genesis_block = genesis_data['blocks'][0]
|
||||
credentials['genesis_block_hash'] = genesis_block.get('hash', '')
|
||||
credentials['genesis_block'] = genesis_data
|
||||
|
||||
# Get genesis address from keystore
|
||||
keystore_path = '/var/lib/aitbc/keystore/validator_keys.json'
|
||||
if os.path.exists(keystore_path):
|
||||
with open(keystore_path, 'r') as f:
|
||||
keys = json.load(f)
|
||||
# Get first key's address
|
||||
for key_id, key_data in keys.items():
|
||||
# Extract address from public key or use key_id
|
||||
credentials['genesis_address'] = key_id
|
||||
break
|
||||
|
||||
# Add chain info
|
||||
credentials['chain_id'] = self.island_chain_id or f"ait-{self.island_id[:8]}"
|
||||
credentials['island_id'] = self.island_id
|
||||
credentials['island_name'] = self.island_name
|
||||
|
||||
# Add RPC endpoint (local)
|
||||
credentials['rpc_endpoint'] = f"http://{self.local_address}:8006"
|
||||
credentials['p2p_port'] = self.local_port
|
||||
|
||||
return credentials
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get blockchain credentials: {e}")
|
||||
return {}
|
||||
|
||||
def __init__(self, local_node_id: str, local_address: str, local_port: int,
|
||||
island_id: str, island_name: str, redis_url: str):
|
||||
self.local_node_id = local_node_id
|
||||
self.local_address = local_address
|
||||
self.local_port = local_port
|
||||
self.island_id = island_id
|
||||
self.island_name = island_name
|
||||
self.island_chain_id = f"ait-{island_id[:8]}"
|
||||
|
||||
self.known_hubs: Dict[str, HubInfo] = {}
|
||||
self.peer_registry: Dict[str, PeerInfo] = {}
|
||||
self.peer_reputation: Dict[str, float] = {}
|
||||
self.peer_last_seen: Dict[str, float] = {}
|
||||
|
||||
# GPU marketplace tracking
|
||||
self.gpu_offers: Dict[str, dict] = {}
|
||||
self.gpu_bids: Dict[str, dict] = {}
|
||||
self.gpu_providers: Dict[str, dict] = {} # node_id -> gpu info
|
||||
|
||||
# Exchange tracking
|
||||
self.exchange_orders: Dict[str, dict] = {} # order_id -> order info
|
||||
self.exchange_order_books: Dict[str, Dict] = {} # pair -> {bids: [], asks: []}
|
||||
|
||||
# Redis client for persistence
|
||||
self.redis_url = redis_url
|
||||
self._redis_client = None
|
||||
|
||||
async def handle_join_request(self, join_request: dict) -> Optional[dict]:
|
||||
"""
|
||||
Handle island join request from a new node
|
||||
|
||||
Args:
|
||||
join_request: Dictionary containing join request data
|
||||
|
||||
Returns:
|
||||
dict: Join response with member list and credentials, or None if failed
|
||||
"""
|
||||
try:
|
||||
requested_island_id = join_request.get('island_id')
|
||||
|
||||
# Validate island ID
|
||||
if requested_island_id != self.island_id:
|
||||
logger.warning(f"Join request for island {requested_island_id} does not match our island {self.island_id}")
|
||||
return None
|
||||
|
||||
# Get all island members
|
||||
members = []
|
||||
for node_id, peer_info in self.peer_registry.items():
|
||||
if peer_info.island_id == self.island_id:
|
||||
members.append({
|
||||
'node_id': peer_info.node_id,
|
||||
'address': peer_info.address,
|
||||
'port': peer_info.port,
|
||||
'is_hub': peer_info.is_hub,
|
||||
'public_address': peer_info.public_address,
|
||||
'public_port': peer_info.public_port
|
||||
})
|
||||
|
||||
# Include self in member list
|
||||
members.append({
|
||||
'node_id': self.local_node_id,
|
||||
'address': self.local_address,
|
||||
'port': self.local_port,
|
||||
'is_hub': True,
|
||||
'public_address': self.known_hubs.get(self.local_node_id, {}).public_address if self.local_node_id in self.known_hubs else None,
|
||||
'public_port': self.known_hubs.get(self.local_node_id, {}).public_port if self.local_node_id in self.known_hubs else None
|
||||
})
|
||||
|
||||
# Get blockchain credentials
|
||||
credentials = self._get_blockchain_credentials()
|
||||
|
||||
# Build response
|
||||
response = {
|
||||
'type': 'join_response',
|
||||
'island_id': self.island_id,
|
||||
'island_name': self.island_name,
|
||||
'island_chain_id': self.island_chain_id or f"ait-{self.island_id[:8]}",
|
||||
'members': members,
|
||||
'credentials': credentials
|
||||
}
|
||||
|
||||
logger.info(f"Sent join_response to node {join_request.get('node_id')} with {len(members)} members")
|
||||
return response
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error handling join request: {e}")
|
||||
return None
|
||||
|
||||
def register_gpu_offer(self, offer_data: dict) -> bool:
|
||||
"""Register a GPU marketplace offer in the hub"""
|
||||
try:
|
||||
offer_id = offer_data.get('offer_id')
|
||||
if offer_id:
|
||||
self.gpu_offers[offer_id] = offer_data
|
||||
logger.info(f"Registered GPU offer: {offer_id}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Error registering GPU offer: {e}")
|
||||
return False
|
||||
|
||||
def register_gpu_bid(self, bid_data: dict) -> bool:
|
||||
"""Register a GPU marketplace bid in the hub"""
|
||||
try:
|
||||
bid_id = bid_data.get('bid_id')
|
||||
if bid_id:
|
||||
self.gpu_bids[bid_id] = bid_data
|
||||
logger.info(f"Registered GPU bid: {bid_id}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Error registering GPU bid: {e}")
|
||||
return False
|
||||
|
||||
def register_gpu_provider(self, node_id: str, gpu_info: dict) -> bool:
|
||||
"""Register a GPU provider in the hub"""
|
||||
try:
|
||||
self.gpu_providers[node_id] = gpu_info
|
||||
logger.info(f"Registered GPU provider: {node_id}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Error registering GPU provider: {e}")
|
||||
return False
|
||||
|
||||
def register_exchange_order(self, order_data: dict) -> bool:
|
||||
"""Register an exchange order in the hub"""
|
||||
try:
|
||||
order_id = order_data.get('order_id')
|
||||
if order_id:
|
||||
self.exchange_orders[order_id] = order_data
|
||||
|
||||
# Update order book
|
||||
pair = order_data.get('pair')
|
||||
side = order_data.get('side')
|
||||
if pair and side:
|
||||
if pair not in self.exchange_order_books:
|
||||
self.exchange_order_books[pair] = {'bids': [], 'asks': []}
|
||||
|
||||
if side == 'buy':
|
||||
self.exchange_order_books[pair]['bids'].append(order_data)
|
||||
elif side == 'sell':
|
||||
self.exchange_order_books[pair]['asks'].append(order_data)
|
||||
|
||||
logger.info(f"Registered exchange order: {order_id}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Error registering exchange order: {e}")
|
||||
return False
|
||||
|
||||
def get_gpu_offers(self) -> list:
|
||||
"""Get all GPU offers"""
|
||||
return list(self.gpu_offers.values())
|
||||
|
||||
def get_gpu_bids(self) -> list:
|
||||
"""Get all GPU bids"""
|
||||
return list(self.gpu_bids.values())
|
||||
|
||||
def get_gpu_providers(self) -> list:
|
||||
"""Get all GPU providers"""
|
||||
return list(self.gpu_providers.values())
|
||||
|
||||
def get_exchange_order_book(self, pair: str) -> dict:
|
||||
"""Get order book for a specific trading pair"""
|
||||
return self.exchange_order_books.get(pair, {'bids': [], 'asks': []})
|
||||
|
||||
async def register_as_hub(self, public_address: Optional[str] = None, public_port: Optional[int] = None) -> bool:
|
||||
"""Register this node as a hub"""
|
||||
if self.is_hub:
|
||||
logger.warning("Already registered as hub")
|
||||
return False
|
||||
|
||||
|
||||
self.is_hub = True
|
||||
self.hub_status = HubStatus.REGISTERED
|
||||
self.registered_at = time.time()
|
||||
|
||||
|
||||
# Add self to known hubs
|
||||
self.known_hubs[self.local_node_id] = HubInfo(
|
||||
hub_info = HubInfo(
|
||||
node_id=self.local_node_id,
|
||||
address=self.local_address,
|
||||
port=self.local_port,
|
||||
@@ -99,24 +375,31 @@ class HubManager:
|
||||
registered_at=time.time(),
|
||||
last_seen=time.time()
|
||||
)
|
||||
|
||||
self.known_hubs[self.local_node_id] = hub_info
|
||||
|
||||
# Persist to Redis
|
||||
await self._persist_hub_registration(hub_info)
|
||||
|
||||
logger.info(f"Registered as hub for island {self.island_id}")
|
||||
return True
|
||||
|
||||
def unregister_as_hub(self) -> bool:
|
||||
async def unregister_as_hub(self) -> bool:
|
||||
"""Unregister this node as a hub"""
|
||||
if not self.is_hub:
|
||||
logger.warning("Not registered as hub")
|
||||
return False
|
||||
|
||||
|
||||
self.is_hub = False
|
||||
self.hub_status = HubStatus.UNREGISTERED
|
||||
self.registered_at = None
|
||||
|
||||
|
||||
# Remove from Redis
|
||||
await self._remove_hub_registration(self.local_node_id)
|
||||
|
||||
# Remove self from known hubs
|
||||
if self.local_node_id in self.known_hubs:
|
||||
del self.known_hubs[self.local_node_id]
|
||||
|
||||
|
||||
logger.info(f"Unregistered as hub for island {self.island_id}")
|
||||
return True
|
||||
|
||||
|
||||
@@ -88,10 +88,11 @@ class P2PNetworkService:
|
||||
self.host,
|
||||
self.port,
|
||||
self.island_id,
|
||||
self.island_name
|
||||
self.island_name,
|
||||
self.config.redis_url
|
||||
)
|
||||
self.hub_manager.register_as_hub(self.public_endpoint[0] if self.public_endpoint else None,
|
||||
self.public_endpoint[1] if self.public_endpoint else None)
|
||||
await self.hub_manager.register_as_hub(self.public_endpoint[0] if self.public_endpoint else None,
|
||||
self.public_endpoint[1] if self.public_endpoint else None)
|
||||
logger.info("Initialized hub manager")
|
||||
|
||||
# Discover public endpoint via STUN if configured
|
||||
@@ -423,6 +424,40 @@ class P2PNetworkService:
|
||||
|
||||
elif msg_type == 'handshake':
|
||||
pass # Ignore subsequent handshakes
|
||||
elif msg_type == 'join_request':
|
||||
# Handle island join request (only if we're a hub)
|
||||
if self.hub_manager:
|
||||
logger.info(f"Received join_request from {peer_id}")
|
||||
response = await self.hub_manager.handle_join_request(message)
|
||||
if response:
|
||||
await self._send_message(writer, response)
|
||||
else:
|
||||
logger.warning(f"Received join_request but not a hub, ignoring")
|
||||
elif msg_type == 'join_response':
|
||||
# Handle island join response (only if we requested to join)
|
||||
logger.info(f"Received join_response from {peer_id}")
|
||||
# Store the response for the CLI to retrieve
|
||||
if not hasattr(self, '_join_response'):
|
||||
self._join_response = {}
|
||||
self._join_response[peer_id] = message
|
||||
elif msg_type == 'gpu_provider_query':
|
||||
# Handle GPU provider query
|
||||
logger.info(f"Received gpu_provider_query from {peer_id}")
|
||||
# Respond with GPU availability
|
||||
gpu_response = {
|
||||
'type': 'gpu_provider_response',
|
||||
'node_id': self.node_id,
|
||||
'gpu_available': self._get_gpu_count(),
|
||||
'gpu_specs': self._get_gpu_specs()
|
||||
}
|
||||
await self._send_message(writer, gpu_response)
|
||||
elif msg_type == 'gpu_provider_response':
|
||||
# Handle GPU provider response
|
||||
logger.info(f"Received gpu_provider_response from {peer_id}")
|
||||
# Store the response for the CLI to retrieve
|
||||
if not hasattr(self, '_gpu_provider_responses'):
|
||||
self._gpu_provider_responses = {}
|
||||
self._gpu_provider_responses[peer_id] = message
|
||||
elif msg_type == 'new_transaction':
|
||||
tx_data = message.get('tx')
|
||||
if tx_data:
|
||||
@@ -470,28 +505,101 @@ class P2PNetworkService:
|
||||
writer.close()
|
||||
try:
|
||||
await writer.wait_closed()
|
||||
except Exception:
|
||||
except:
|
||||
pass
|
||||
|
||||
async def _send_message(self, writer: asyncio.StreamWriter, message: dict):
|
||||
"""Helper to send a JSON message over a stream"""
|
||||
def _get_gpu_count(self) -> int:
|
||||
"""Get the number of available GPUs on this node"""
|
||||
try:
|
||||
data = json.dumps(message) + '\n'
|
||||
writer.write(data.encode())
|
||||
await writer.drain()
|
||||
# Try to read GPU count from system
|
||||
# This is a placeholder - in a real implementation, this would
|
||||
# query the actual GPU hardware or a configuration file
|
||||
import os
|
||||
gpu_config_path = '/var/lib/aitbc/gpu_config.json'
|
||||
if os.path.exists(gpu_config_path):
|
||||
with open(gpu_config_path, 'r') as f:
|
||||
config = json.load(f)
|
||||
return config.get('gpu_count', 0)
|
||||
return 0
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to send message: {e}")
|
||||
logger.error(f"Error getting GPU count: {e}")
|
||||
return 0
|
||||
|
||||
async def _ping_peers_loop(self):
|
||||
"""Periodically broadcast pings to all active connections to keep them alive"""
|
||||
while not self._stop_event.is_set():
|
||||
await asyncio.sleep(20)
|
||||
ping_msg = {'type': 'ping', 'node_id': self.node_id}
|
||||
|
||||
# Make a copy of writers to avoid dictionary changed during iteration error
|
||||
writers = list(self.active_connections.values())
|
||||
for writer in writers:
|
||||
await self._send_message(writer, ping_msg)
|
||||
def _get_gpu_specs(self) -> dict:
|
||||
"""Get GPU specifications for this node"""
|
||||
try:
|
||||
# Try to read GPU specs from system
|
||||
# This is a placeholder - in a real implementation, this would
|
||||
# query the actual GPU hardware or a configuration file
|
||||
import os
|
||||
gpu_config_path = '/var/lib/aitbc/gpu_config.json'
|
||||
if os.path.exists(gpu_config_path):
|
||||
with open(gpu_config_path, 'r') as f:
|
||||
config = json.load(f)
|
||||
return config.get('specs', {})
|
||||
return {}
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting GPU specs: {e}")
|
||||
return {}
|
||||
|
||||
async def send_join_request(self, hub_address: str, hub_port: int, island_id: str, island_name: str, node_id: str, public_key_pem: str) -> Optional[dict]:
|
||||
"""
|
||||
Send join request to a hub and wait for response
|
||||
|
||||
Args:
|
||||
hub_address: Hub IP address or hostname
|
||||
hub_port: Hub port
|
||||
island_id: Island ID to join
|
||||
island_name: Island name
|
||||
node_id: Local node ID
|
||||
public_key_pem: Public key PEM
|
||||
|
||||
Returns:
|
||||
dict: Join response from hub, or None if failed
|
||||
"""
|
||||
try:
|
||||
# Connect to hub
|
||||
reader, writer = await asyncio.open_connection(hub_address, hub_port)
|
||||
logger.info(f"Connected to hub {hub_address}:{hub_port}")
|
||||
|
||||
# Send join request
|
||||
join_request = {
|
||||
'type': 'join_request',
|
||||
'node_id': node_id,
|
||||
'island_id': island_id,
|
||||
'island_name': island_name,
|
||||
'public_key_pem': public_key_pem
|
||||
}
|
||||
await self._send_message(writer, join_request)
|
||||
logger.info(f"Sent join_request to hub")
|
||||
|
||||
# Wait for join response (with timeout)
|
||||
try:
|
||||
data = await asyncio.wait_for(reader.readline(), timeout=30.0)
|
||||
if data:
|
||||
response = json.loads(data.decode().strip())
|
||||
if response.get('type') == 'join_response':
|
||||
logger.info(f"Received join_response from hub")
|
||||
writer.close()
|
||||
await writer.wait_closed()
|
||||
return response
|
||||
else:
|
||||
logger.warning(f"Unexpected response type: {response.get('type')}")
|
||||
else:
|
||||
logger.warning("No response from hub")
|
||||
except asyncio.TimeoutError:
|
||||
logger.warning("Timeout waiting for join response")
|
||||
|
||||
writer.close()
|
||||
await writer.wait_closed()
|
||||
return None
|
||||
|
||||
except ConnectionRefusedError:
|
||||
logger.error(f"Hub {hub_address}:{hub_port} refused connection")
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to send join request: {e}")
|
||||
return None
|
||||
|
||||
|
||||
async def run_p2p_service(host: str, port: int, node_id: str, peers: str):
|
||||
|
||||
@@ -60,7 +60,7 @@ def _serialize_receipt(receipt: Receipt) -> Dict[str, Any]:
|
||||
|
||||
|
||||
class TransactionRequest(BaseModel):
|
||||
type: str = Field(description="Transaction type, e.g. TRANSFER or RECEIPT_CLAIM")
|
||||
type: str = Field(description="Transaction type, e.g. TRANSFER, RECEIPT_CLAIM, GPU_MARKETPLACE, EXCHANGE")
|
||||
sender: str
|
||||
nonce: int
|
||||
fee: int = Field(ge=0)
|
||||
@@ -70,8 +70,9 @@ class TransactionRequest(BaseModel):
|
||||
@model_validator(mode="after")
|
||||
def normalize_type(self) -> "TransactionRequest": # type: ignore[override]
|
||||
normalized = self.type.upper()
|
||||
if normalized not in {"TRANSFER", "RECEIPT_CLAIM"}:
|
||||
raise ValueError(f"unsupported transaction type: {self.type}")
|
||||
valid_types = {"TRANSFER", "RECEIPT_CLAIM", "GPU_MARKETPLACE", "EXCHANGE"}
|
||||
if normalized not in valid_types:
|
||||
raise ValueError(f"unsupported transaction type: {normalized}. Valid types: {valid_types}")
|
||||
self.type = normalized
|
||||
return self
|
||||
|
||||
@@ -201,31 +202,83 @@ async def get_mempool(chain_id: str = None, limit: int = 100) -> Dict[str, Any]:
|
||||
|
||||
|
||||
@router.get("/accounts/{address}", summary="Get account information")
|
||||
async def get_account(address: str) -> Dict[str, Any]:
|
||||
"""Get account information including balance"""
|
||||
from ..models import Account
|
||||
async def get_account(address: str, chain_id: str = None) -> Dict[str, Any]:
|
||||
"""Get account information"""
|
||||
chain_id = get_chain_id(chain_id)
|
||||
|
||||
try:
|
||||
with session_scope() as session:
|
||||
account = session.exec(select(Account).where(Account.address == address)).first()
|
||||
with session_scope() as session:
|
||||
account = session.exec(select(Account).where(Account.address == address).where(Account.chain_id == chain_id)).first()
|
||||
if not account:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Account not found")
|
||||
|
||||
return {
|
||||
"address": account.address,
|
||||
"balance": account.balance,
|
||||
"nonce": account.nonce,
|
||||
"chain_id": account.chain_id
|
||||
}
|
||||
|
||||
|
||||
@router.get("/transactions", summary="Query transactions")
|
||||
async def query_transactions(
|
||||
transaction_type: Optional[str] = None,
|
||||
island_id: Optional[str] = None,
|
||||
pair: Optional[str] = None,
|
||||
status: Optional[str] = None,
|
||||
order_id: Optional[str] = None,
|
||||
limit: Optional[int] = 100,
|
||||
chain_id: str = None
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Query transactions with optional filters"""
|
||||
chain_id = get_chain_id(chain_id)
|
||||
|
||||
with session_scope() as session:
|
||||
query = select(Transaction).where(Transaction.chain_id == chain_id)
|
||||
|
||||
# Apply filters based on payload fields
|
||||
transactions = session.exec(query).all()
|
||||
|
||||
results = []
|
||||
for tx in transactions:
|
||||
# Filter by transaction type in payload
|
||||
if transaction_type and tx.payload.get('type') != transaction_type:
|
||||
continue
|
||||
|
||||
if account is None:
|
||||
return {
|
||||
"address": address,
|
||||
"balance": 0,
|
||||
"nonce": 0,
|
||||
"exists": False
|
||||
}
|
||||
# Filter by island_id in payload
|
||||
if island_id and tx.payload.get('island_id') != island_id:
|
||||
continue
|
||||
|
||||
return {
|
||||
"address": account.address,
|
||||
"balance": account.balance,
|
||||
"nonce": account.nonce,
|
||||
"exists": True
|
||||
}
|
||||
except Exception as e:
|
||||
_logger.error("Failed to get account", extra={"error": str(e), "address": address})
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get account: {str(e)}")
|
||||
# Filter by pair in payload
|
||||
if pair and tx.payload.get('pair') != pair:
|
||||
continue
|
||||
|
||||
# Filter by status in payload
|
||||
if status and tx.payload.get('status') != status:
|
||||
continue
|
||||
|
||||
# Filter by order_id in payload
|
||||
if order_id and tx.payload.get('order_id') != order_id and tx.payload.get('offer_id') != order_id and tx.payload.get('bid_id') != order_id:
|
||||
continue
|
||||
|
||||
results.append({
|
||||
"transaction_id": tx.id,
|
||||
"tx_hash": tx.tx_hash,
|
||||
"sender": tx.sender,
|
||||
"recipient": tx.recipient,
|
||||
"payload": tx.payload,
|
||||
"status": tx.status,
|
||||
"created_at": tx.created_at.isoformat(),
|
||||
"timestamp": tx.timestamp,
|
||||
"nonce": tx.nonce,
|
||||
"value": tx.value,
|
||||
"fee": tx.fee
|
||||
})
|
||||
|
||||
# Apply limit
|
||||
if limit:
|
||||
results = results[:limit]
|
||||
|
||||
return results
|
||||
|
||||
|
||||
@router.get("/blocks-range", summary="Get blocks in height range")
|
||||
|
||||
@@ -21,17 +21,18 @@ class TestP2PDiscovery:
|
||||
|
||||
def test_generate_node_id(self):
|
||||
"""Test node ID generation"""
|
||||
hostname = "node1.example.com"
|
||||
address = "127.0.0.1"
|
||||
port = 8000
|
||||
public_key = "test_public_key"
|
||||
|
||||
node_id = self.discovery.generate_node_id(address, port, public_key)
|
||||
|
||||
|
||||
node_id = self.discovery.generate_node_id(hostname, address, port, public_key)
|
||||
|
||||
assert isinstance(node_id, str)
|
||||
assert len(node_id) == 64 # SHA256 hex length
|
||||
|
||||
|
||||
# Test consistency
|
||||
node_id2 = self.discovery.generate_node_id(address, port, public_key)
|
||||
node_id2 = self.discovery.generate_node_id(hostname, address, port, public_key)
|
||||
assert node_id == node_id2
|
||||
|
||||
def test_add_bootstrap_node(self):
|
||||
@@ -45,17 +46,18 @@ class TestP2PDiscovery:
|
||||
|
||||
def test_generate_node_id_consistency(self):
|
||||
"""Test node ID generation consistency"""
|
||||
hostname = "node2.example.com"
|
||||
address = "192.168.1.1"
|
||||
port = 9000
|
||||
public_key = "test_key"
|
||||
|
||||
node_id1 = self.discovery.generate_node_id(address, port, public_key)
|
||||
node_id2 = self.discovery.generate_node_id(address, port, public_key)
|
||||
|
||||
|
||||
node_id1 = self.discovery.generate_node_id(hostname, address, port, public_key)
|
||||
node_id2 = self.discovery.generate_node_id(hostname, address, port, public_key)
|
||||
|
||||
assert node_id1 == node_id2
|
||||
|
||||
|
||||
# Different inputs should produce different IDs
|
||||
node_id3 = self.discovery.generate_node_id("192.168.1.2", port, public_key)
|
||||
node_id3 = self.discovery.generate_node_id(hostname, "192.168.1.2", port, public_key)
|
||||
assert node_id1 != node_id3
|
||||
|
||||
def test_get_peer_count_empty(self):
|
||||
|
||||
324
apps/blockchain-node/tests/network/test_hub_manager.py
Normal file
324
apps/blockchain-node/tests/network/test_hub_manager.py
Normal file
@@ -0,0 +1,324 @@
|
||||
"""
|
||||
Tests for Hub Manager with Redis persistence
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import asyncio
|
||||
from unittest.mock import Mock, AsyncMock, patch
|
||||
from aitbc_chain.network.hub_manager import HubManager, HubInfo, HubStatus, PeerInfo
|
||||
|
||||
|
||||
class TestHubManager:
|
||||
"""Test cases for Hub Manager with Redis persistence"""
|
||||
|
||||
@pytest.fixture
|
||||
def hub_manager(self):
|
||||
"""Create a HubManager instance for testing"""
|
||||
return HubManager(
|
||||
local_node_id="test-node-id",
|
||||
local_address="127.0.0.1",
|
||||
local_port=7070,
|
||||
island_id="test-island-id",
|
||||
island_name="test-island",
|
||||
redis_url="redis://localhost:6379"
|
||||
)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_connect_redis_success(self, hub_manager):
|
||||
"""Test successful Redis connection"""
|
||||
with patch('aitbc_chain.network.hub_manager.redis.asyncio') as mock_redis:
|
||||
mock_client = AsyncMock()
|
||||
mock_client.ping = AsyncMock(return_value=True)
|
||||
mock_redis.from_url.return_value = mock_client
|
||||
|
||||
result = await hub_manager._connect_redis()
|
||||
|
||||
assert result is True
|
||||
assert hub_manager._redis is not None
|
||||
mock_redis.from_url.assert_called_once_with("redis://localhost:6379")
|
||||
mock_client.ping.assert_called_once()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_connect_redis_failure(self, hub_manager):
|
||||
"""Test Redis connection failure"""
|
||||
with patch('aitbc_chain.network.hub_manager.redis.asyncio') as mock_redis:
|
||||
mock_redis.from_url.side_effect = Exception("Connection failed")
|
||||
|
||||
result = await hub_manager._connect_redis()
|
||||
|
||||
assert result is False
|
||||
assert hub_manager._redis is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_persist_hub_registration_success(self, hub_manager):
|
||||
"""Test successful hub registration persistence to Redis"""
|
||||
hub_info = HubInfo(
|
||||
node_id="test-node-id",
|
||||
address="127.0.0.1",
|
||||
port=7070,
|
||||
island_id="test-island-id",
|
||||
island_name="test-island",
|
||||
public_address="1.2.3.4",
|
||||
public_port=7070,
|
||||
registered_at=1234567890.0,
|
||||
last_seen=1234567890.0
|
||||
)
|
||||
|
||||
with patch('aitbc_chain.network.hub_manager.redis.asyncio') as mock_redis:
|
||||
mock_client = AsyncMock()
|
||||
mock_client.setex = AsyncMock(return_value=True)
|
||||
mock_redis.from_url.return_value = mock_client
|
||||
|
||||
result = await hub_manager._persist_hub_registration(hub_info)
|
||||
|
||||
assert result is True
|
||||
mock_client.setex.assert_called_once()
|
||||
key = mock_client.setex.call_args[0][0]
|
||||
assert key == "hub:test-node-id"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_persist_hub_registration_no_redis(self, hub_manager):
|
||||
"""Test hub registration persistence when Redis is unavailable"""
|
||||
hub_info = HubInfo(
|
||||
node_id="test-node-id",
|
||||
address="127.0.0.1",
|
||||
port=7070,
|
||||
island_id="test-island-id",
|
||||
island_name="test-island"
|
||||
)
|
||||
|
||||
with patch.object(hub_manager, '_connect_redis', return_value=False):
|
||||
result = await hub_manager._persist_hub_registration(hub_info)
|
||||
|
||||
assert result is False
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_remove_hub_registration_success(self, hub_manager):
|
||||
"""Test successful hub registration removal from Redis"""
|
||||
with patch('aitbc_chain.network.hub_manager.redis.asyncio') as mock_redis:
|
||||
mock_client = AsyncMock()
|
||||
mock_client.delete = AsyncMock(return_value=True)
|
||||
mock_redis.from_url.return_value = mock_client
|
||||
|
||||
result = await hub_manager._remove_hub_registration("test-node-id")
|
||||
|
||||
assert result is True
|
||||
mock_client.delete.assert_called_once_with("hub:test-node-id")
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_load_hub_registration_success(self, hub_manager):
|
||||
"""Test successful hub registration loading from Redis"""
|
||||
with patch('aitbc_chain.network.hub_manager.redis.asyncio') as mock_redis:
|
||||
mock_client = AsyncMock()
|
||||
hub_data = {
|
||||
"node_id": "test-node-id",
|
||||
"address": "127.0.0.1",
|
||||
"port": 7070,
|
||||
"island_id": "test-island-id",
|
||||
"island_name": "test-island"
|
||||
}
|
||||
mock_client.get = AsyncMock(return_value='{"node_id": "test-node-id", "address": "127.0.0.1", "port": 7070, "island_id": "test-island-id", "island_name": "test-island"}')
|
||||
mock_redis.from_url.return_value = mock_client
|
||||
|
||||
result = await hub_manager._load_hub_registration()
|
||||
|
||||
assert result is not None
|
||||
assert result.node_id == "test-node-id"
|
||||
mock_client.get.assert_called_once_with("hub:test-node-id")
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_load_hub_registration_not_found(self, hub_manager):
|
||||
"""Test hub registration loading when not found in Redis"""
|
||||
with patch('aitbc_chain.network.hub_manager.redis.asyncio') as mock_redis:
|
||||
mock_client = AsyncMock()
|
||||
mock_client.get = AsyncMock(return_value=None)
|
||||
mock_redis.from_url.return_value = mock_client
|
||||
|
||||
result = await hub_manager._load_hub_registration()
|
||||
|
||||
assert result is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_register_as_hub_success(self, hub_manager):
|
||||
"""Test successful hub registration"""
|
||||
with patch.object(hub_manager, '_persist_hub_registration', return_value=True):
|
||||
result = await hub_manager.register_as_hub(public_address="1.2.3.4", public_port=7070)
|
||||
|
||||
assert result is True
|
||||
assert hub_manager.is_hub is True
|
||||
assert hub_manager.hub_status == HubStatus.REGISTERED
|
||||
assert hub_manager.registered_at is not None
|
||||
assert hub_manager.local_node_id in hub_manager.known_hubs
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_register_as_hub_already_registered(self, hub_manager):
|
||||
"""Test hub registration when already registered"""
|
||||
hub_manager.is_hub = True
|
||||
hub_manager.hub_status = HubStatus.REGISTERED
|
||||
|
||||
result = await hub_manager.register_as_hub()
|
||||
|
||||
assert result is False
|
||||
assert hub_manager.is_hub is True
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_unregister_as_hub_success(self, hub_manager):
|
||||
"""Test successful hub unregistration"""
|
||||
hub_manager.is_hub = True
|
||||
hub_manager.hub_status = HubStatus.REGISTERED
|
||||
hub_manager.known_hubs["test-node-id"] = HubInfo(
|
||||
node_id="test-node-id",
|
||||
address="127.0.0.1",
|
||||
port=7070,
|
||||
island_id="test-island-id",
|
||||
island_name="test-island"
|
||||
)
|
||||
|
||||
with patch.object(hub_manager, '_remove_hub_registration', return_value=True):
|
||||
result = await hub_manager.unregister_as_hub()
|
||||
|
||||
assert result is True
|
||||
assert hub_manager.is_hub is False
|
||||
assert hub_manager.hub_status == HubStatus.UNREGISTERED
|
||||
assert hub_manager.registered_at is None
|
||||
assert hub_manager.local_node_id not in hub_manager.known_hubs
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_unregister_as_hub_not_registered(self, hub_manager):
|
||||
"""Test hub unregistration when not registered"""
|
||||
result = await hub_manager.unregister_as_hub()
|
||||
|
||||
assert result is False
|
||||
assert hub_manager.is_hub is False
|
||||
|
||||
def test_register_peer(self, hub_manager):
|
||||
"""Test peer registration"""
|
||||
peer_info = PeerInfo(
|
||||
node_id="peer-1",
|
||||
address="192.168.1.1",
|
||||
port=7071,
|
||||
island_id="test-island-id",
|
||||
is_hub=False
|
||||
)
|
||||
|
||||
result = hub_manager.register_peer(peer_info)
|
||||
|
||||
assert result is True
|
||||
assert "peer-1" in hub_manager.peer_registry
|
||||
assert "peer-1" in hub_manager.island_peers["test-island-id"]
|
||||
|
||||
def test_unregister_peer(self, hub_manager):
|
||||
"""Test peer unregistration"""
|
||||
peer_info = PeerInfo(
|
||||
node_id="peer-1",
|
||||
address="192.168.1.1",
|
||||
port=7071,
|
||||
island_id="test-island-id",
|
||||
is_hub=False
|
||||
)
|
||||
hub_manager.register_peer(peer_info)
|
||||
|
||||
result = hub_manager.unregister_peer("peer-1")
|
||||
|
||||
assert result is True
|
||||
assert "peer-1" not in hub_manager.peer_registry
|
||||
assert "peer-1" not in hub_manager.island_peers["test-island-id"]
|
||||
|
||||
def test_add_known_hub(self, hub_manager):
|
||||
"""Test adding a known hub"""
|
||||
hub_info = HubInfo(
|
||||
node_id="hub-1",
|
||||
address="10.1.1.1",
|
||||
port=7070,
|
||||
island_id="test-island-id",
|
||||
island_name="test-island"
|
||||
)
|
||||
|
||||
hub_manager.add_known_hub(hub_info)
|
||||
|
||||
assert "hub-1" in hub_manager.known_hubs
|
||||
assert hub_manager.known_hubs["hub-1"] == hub_info
|
||||
|
||||
def test_remove_known_hub(self, hub_manager):
|
||||
"""Test removing a known hub"""
|
||||
hub_info = HubInfo(
|
||||
node_id="hub-1",
|
||||
address="10.1.1.1",
|
||||
port=7070,
|
||||
island_id="test-island-id",
|
||||
island_name="test-island"
|
||||
)
|
||||
hub_manager.add_known_hub(hub_info)
|
||||
|
||||
result = hub_manager.remove_known_hub("hub-1")
|
||||
|
||||
assert result is True
|
||||
assert "hub-1" not in hub_manager.known_hubs
|
||||
|
||||
def test_get_peer_list(self, hub_manager):
|
||||
"""Test getting peer list for an island"""
|
||||
peer_info1 = PeerInfo(
|
||||
node_id="peer-1",
|
||||
address="192.168.1.1",
|
||||
port=7071,
|
||||
island_id="test-island-id",
|
||||
is_hub=False
|
||||
)
|
||||
peer_info2 = PeerInfo(
|
||||
node_id="peer-2",
|
||||
address="192.168.1.2",
|
||||
port=7072,
|
||||
island_id="other-island-id",
|
||||
is_hub=False
|
||||
)
|
||||
hub_manager.register_peer(peer_info1)
|
||||
hub_manager.register_peer(peer_info2)
|
||||
|
||||
peers = hub_manager.get_peer_list("test-island-id")
|
||||
|
||||
assert len(peers) == 1
|
||||
assert peers[0].node_id == "peer-1"
|
||||
|
||||
def test_get_hub_list(self, hub_manager):
|
||||
"""Test getting hub list"""
|
||||
hub_info1 = HubInfo(
|
||||
node_id="hub-1",
|
||||
address="10.1.1.1",
|
||||
port=7070,
|
||||
island_id="test-island-id",
|
||||
island_name="test-island"
|
||||
)
|
||||
hub_info2 = HubInfo(
|
||||
node_id="hub-2",
|
||||
address="10.1.1.2",
|
||||
port=7070,
|
||||
island_id="other-island-id",
|
||||
island_name="other-island"
|
||||
)
|
||||
hub_manager.add_known_hub(hub_info1)
|
||||
hub_manager.add_known_hub(hub_info2)
|
||||
|
||||
hubs = hub_manager.get_hub_list("test-island-id")
|
||||
|
||||
assert len(hubs) == 1
|
||||
assert hubs[0].node_id == "hub-1"
|
||||
|
||||
def test_update_peer_last_seen(self, hub_manager):
|
||||
"""Test updating peer last seen time"""
|
||||
peer_info = PeerInfo(
|
||||
node_id="peer-1",
|
||||
address="192.168.1.1",
|
||||
port=7071,
|
||||
island_id="test-island-id",
|
||||
is_hub=False,
|
||||
last_seen=100.0
|
||||
)
|
||||
hub_manager.register_peer(peer_info)
|
||||
|
||||
hub_manager.update_peer_last_seen("peer-1")
|
||||
|
||||
assert hub_manager.peer_registry["peer-1"].last_seen > 100.0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pytest.main([__file__])
|
||||
244
apps/blockchain-node/tests/network/test_island_join.py
Normal file
244
apps/blockchain-node/tests/network/test_island_join.py
Normal file
@@ -0,0 +1,244 @@
|
||||
"""
|
||||
Tests for Island Join functionality
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import asyncio
|
||||
from unittest.mock import Mock, AsyncMock, patch, MagicMock
|
||||
from aitbc_chain.network.hub_manager import HubManager, HubInfo, PeerInfo
|
||||
from aitbc_chain.p2p_network import P2PNetworkService
|
||||
|
||||
|
||||
class TestHubManagerJoin:
|
||||
"""Test cases for HubManager join request handling"""
|
||||
|
||||
@pytest.fixture
|
||||
def hub_manager(self):
|
||||
"""Create a HubManager instance for testing"""
|
||||
return HubManager(
|
||||
local_node_id="test-hub-node",
|
||||
local_address="127.0.0.1",
|
||||
local_port=7070,
|
||||
island_id="test-island-id",
|
||||
island_name="test-island",
|
||||
redis_url="redis://localhost:6379"
|
||||
)
|
||||
|
||||
def test_get_blockchain_credentials(self, hub_manager):
|
||||
"""Test blockchain credentials retrieval"""
|
||||
with patch('aitbc_chain.network.hub_manager.os.path.exists', return_value=True):
|
||||
with patch('aitbc_chain.network.hub_manager.open', create=True) as mock_open:
|
||||
# Mock genesis.json
|
||||
genesis_data = {
|
||||
'blocks': [{'hash': 'test-genesis-hash'}]
|
||||
}
|
||||
mock_file = MagicMock()
|
||||
mock_file.read.return_value = '{"blocks": [{"hash": "test-genesis-hash"}]}'
|
||||
mock_open.return_value.__enter__.return_value = mock_file
|
||||
|
||||
# Mock keystore
|
||||
with patch('aitbc_chain.network.hub_manager.json.load') as mock_json_load:
|
||||
mock_json_load.return_value = {'0x123': {'public_key_pem': 'test-key'}}
|
||||
|
||||
credentials = hub_manager._get_blockchain_credentials()
|
||||
|
||||
assert credentials is not None
|
||||
assert 'chain_id' in credentials
|
||||
assert 'island_id' in credentials
|
||||
assert credentials['island_id'] == 'test-island-id'
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_handle_join_request_success(self, hub_manager):
|
||||
"""Test successful join request handling"""
|
||||
# Add some peers to the registry
|
||||
peer_info = PeerInfo(
|
||||
node_id="peer-1",
|
||||
address="192.168.1.1",
|
||||
port=7071,
|
||||
island_id="test-island-id",
|
||||
is_hub=False
|
||||
)
|
||||
hub_manager.register_peer(peer_info)
|
||||
|
||||
join_request = {
|
||||
'type': 'join_request',
|
||||
'node_id': 'new-node',
|
||||
'island_id': 'test-island-id',
|
||||
'island_name': 'test-island',
|
||||
'public_key_pem': 'test-pem'
|
||||
}
|
||||
|
||||
with patch.object(hub_manager, '_get_blockchain_credentials', return_value={'chain_id': 'test-chain'}):
|
||||
response = await hub_manager.handle_join_request(join_request)
|
||||
|
||||
assert response is not None
|
||||
assert response['type'] == 'join_response'
|
||||
assert response['island_id'] == 'test-island-id'
|
||||
assert len(response['members']) >= 1 # At least the hub itself
|
||||
assert 'credentials' in response
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_handle_join_request_wrong_island(self, hub_manager):
|
||||
"""Test join request for wrong island"""
|
||||
join_request = {
|
||||
'type': 'join_request',
|
||||
'node_id': 'new-node',
|
||||
'island_id': 'wrong-island-id',
|
||||
'island_name': 'wrong-island',
|
||||
'public_key_pem': 'test-pem'
|
||||
}
|
||||
|
||||
response = await hub_manager.handle_join_request(join_request)
|
||||
|
||||
assert response is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_handle_join_request_with_members(self, hub_manager):
|
||||
"""Test join request returns all island members"""
|
||||
# Add multiple peers
|
||||
for i in range(3):
|
||||
peer_info = PeerInfo(
|
||||
node_id=f"peer-{i}",
|
||||
address=f"192.168.1.{i}",
|
||||
port=7070 + i,
|
||||
island_id="test-island-id",
|
||||
is_hub=False
|
||||
)
|
||||
hub_manager.register_peer(peer_info)
|
||||
|
||||
join_request = {
|
||||
'type': 'join_request',
|
||||
'node_id': 'new-node',
|
||||
'island_id': 'test-island-id',
|
||||
'island_name': 'test-island',
|
||||
'public_key_pem': 'test-pem'
|
||||
}
|
||||
|
||||
with patch.object(hub_manager, '_get_blockchain_credentials', return_value={'chain_id': 'test-chain'}):
|
||||
response = await hub_manager.handle_join_request(join_request)
|
||||
|
||||
assert response is not None
|
||||
# Should include all peers + hub itself
|
||||
assert len(response['members']) >= 4
|
||||
|
||||
|
||||
class TestP2PNetworkJoin:
|
||||
"""Test cases for P2P network join request functionality"""
|
||||
|
||||
@pytest.fixture
|
||||
def p2p_service(self):
|
||||
"""Create a P2P service instance for testing"""
|
||||
return P2PNetworkService(
|
||||
host="127.0.0.1",
|
||||
port=7070,
|
||||
node_id="test-node",
|
||||
peers=[]
|
||||
)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_send_join_request_success(self, p2p_service):
|
||||
"""Test successful join request to hub"""
|
||||
join_response = {
|
||||
'type': 'join_response',
|
||||
'island_id': 'test-island-id',
|
||||
'island_name': 'test-island',
|
||||
'island_chain_id': 'test-chain',
|
||||
'members': [],
|
||||
'credentials': {}
|
||||
}
|
||||
|
||||
with patch('aitbc_chain.p2p_network.asyncio.open_connection') as mock_open:
|
||||
# Mock reader and writer
|
||||
mock_reader = AsyncMock()
|
||||
mock_reader.readline = AsyncMock(return_value=b'{"type": "join_response"}')
|
||||
mock_writer = AsyncMock()
|
||||
mock_writer.close = AsyncMock()
|
||||
mock_writer.wait_closed = AsyncMock()
|
||||
mock_open.return_value = (mock_reader, mock_writer)
|
||||
|
||||
response = await p2p_service.send_join_request(
|
||||
hub_address="127.0.0.1",
|
||||
hub_port=7070,
|
||||
island_id="test-island-id",
|
||||
island_name="test-island",
|
||||
node_id="test-node",
|
||||
public_key_pem="test-pem"
|
||||
)
|
||||
|
||||
assert response is not None
|
||||
mock_open.assert_called_once_with("127.0.0.1", 7070)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_send_join_request_connection_refused(self, p2p_service):
|
||||
"""Test join request when hub refuses connection"""
|
||||
with patch('aitbc_chain.p2p_network.asyncio.open_connection') as mock_open:
|
||||
mock_open.side_effect = ConnectionRefusedError()
|
||||
|
||||
response = await p2p_service.send_join_request(
|
||||
hub_address="127.0.0.1",
|
||||
hub_port=7070,
|
||||
island_id="test-island-id",
|
||||
island_name="test-island",
|
||||
node_id="test-node",
|
||||
public_key_pem="test-pem"
|
||||
)
|
||||
|
||||
assert response is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_send_join_request_timeout(self, p2p_service):
|
||||
"""Test join request timeout"""
|
||||
with patch('aitbc_chain.p2p_network.asyncio.open_connection') as mock_open:
|
||||
# Mock reader that times out
|
||||
mock_reader = AsyncMock()
|
||||
mock_reader.readline = AsyncMock(side_effect=asyncio.TimeoutError())
|
||||
mock_writer = AsyncMock()
|
||||
mock_writer.close = AsyncMock()
|
||||
mock_writer.wait_closed = AsyncMock()
|
||||
mock_open.return_value = (mock_reader, mock_writer)
|
||||
|
||||
response = await p2p_service.send_join_request(
|
||||
hub_address="127.0.0.1",
|
||||
hub_port=7070,
|
||||
island_id="test-island-id",
|
||||
island_name="test-island",
|
||||
node_id="test-node",
|
||||
public_key_pem="test-pem"
|
||||
)
|
||||
|
||||
assert response is None
|
||||
|
||||
|
||||
class TestJoinMessageHandling:
|
||||
"""Test cases for join message handling in P2P network"""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_join_request_message_handling(self):
|
||||
"""Test that join_request messages are handled correctly"""
|
||||
service = P2PNetworkService(
|
||||
host="127.0.0.1",
|
||||
port=7070,
|
||||
node_id="test-node",
|
||||
peers=[]
|
||||
)
|
||||
|
||||
# Mock hub manager
|
||||
service.hub_manager = Mock()
|
||||
service.hub_manager.handle_join_request = AsyncMock(return_value={'type': 'join_response'})
|
||||
|
||||
join_request = {
|
||||
'type': 'join_request',
|
||||
'node_id': 'new-node',
|
||||
'island_id': 'test-island-id'
|
||||
}
|
||||
|
||||
# The actual message handling happens in _listen_to_stream
|
||||
# This test verifies the hub_manager.handle_join_request would be called
|
||||
response = await service.hub_manager.handle_join_request(join_request)
|
||||
|
||||
assert response is not None
|
||||
assert response['type'] == 'join_response'
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pytest.main([__file__])
|
||||
@@ -77,7 +77,7 @@ app.add_middleware(
|
||||
"http://localhost:3000",
|
||||
"http://localhost:8080",
|
||||
"http://localhost:8000",
|
||||
"http://localhost:3003"
|
||||
"http://localhost:8008"
|
||||
],
|
||||
allow_credentials=True,
|
||||
allow_methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"],
|
||||
@@ -358,4 +358,4 @@ def health_check():
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
uvicorn.run(app, host="0.0.0.0", port=3003)
|
||||
uvicorn.run(app, host="0.0.0.0", port=8008)
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
# Exchange API Routes - Add this to the existing nginx config
|
||||
|
||||
# Exchange API Routes
|
||||
location /api/trades/ {
|
||||
proxy_pass http://127.0.0.1:3003/api/trades/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
}
|
||||
|
||||
location /api/orders {
|
||||
proxy_pass http://127.0.0.1:3003/api/orders;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
}
|
||||
@@ -347,7 +347,7 @@ class ExchangeAPIHandler(BaseHTTPRequestHandler):
|
||||
"error": str(e)
|
||||
}, 500)
|
||||
|
||||
def run_server(port=3003):
|
||||
def run_server(port=8008):
|
||||
"""Run the server"""
|
||||
init_db()
|
||||
|
||||
|
||||
556
cli/aitbc_cli/commands/exchange_island.py
Normal file
556
cli/aitbc_cli/commands/exchange_island.py
Normal file
@@ -0,0 +1,556 @@
|
||||
"""
|
||||
Exchange Island CLI Commands
|
||||
Commands for trading AIT coin against BTC and ETH on the island exchange
|
||||
"""
|
||||
|
||||
import click
|
||||
import json
|
||||
import hashlib
|
||||
import socket
|
||||
import os
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
from typing import Optional
|
||||
from ..utils import output, error, success, info, warning
|
||||
from ..utils.island_credentials import (
|
||||
load_island_credentials, get_rpc_endpoint, get_chain_id,
|
||||
get_island_id, get_island_name
|
||||
)
|
||||
|
||||
|
||||
# Supported trading pairs
|
||||
SUPPORTED_PAIRS = ['AIT/BTC', 'AIT/ETH']
|
||||
|
||||
|
||||
@click.group()
|
||||
def exchange_island():
|
||||
"""Exchange commands for trading AIT against BTC and ETH on the island"""
|
||||
pass
|
||||
|
||||
|
||||
@exchange_island.command()
|
||||
@click.argument('ait_amount', type=float)
|
||||
@click.argument('quote_currency', type=click.Choice(['BTC', 'ETH']))
|
||||
@click.option('--max-price', type=float, help='Maximum price to pay per AIT')
|
||||
@click.pass_context
|
||||
def buy(ctx, ait_amount: float, quote_currency: str, max_price: Optional[float]):
|
||||
"""Buy AIT with BTC or ETH"""
|
||||
try:
|
||||
if ait_amount <= 0:
|
||||
error("AIT amount must be greater than 0")
|
||||
raise click.Abort()
|
||||
|
||||
# Load island credentials
|
||||
credentials = load_island_credentials()
|
||||
rpc_endpoint = get_rpc_endpoint()
|
||||
chain_id = get_chain_id()
|
||||
island_id = get_island_id()
|
||||
|
||||
# Get user node ID
|
||||
hostname = socket.gethostname()
|
||||
local_address = socket.gethostbyname(hostname)
|
||||
p2p_port = credentials.get('credentials', {}).get('p2p_port', 8001)
|
||||
|
||||
# Get public key for node ID generation
|
||||
keystore_path = '/var/lib/aitbc/keystore/validator_keys.json'
|
||||
if os.path.exists(keystore_path):
|
||||
with open(keystore_path, 'r') as f:
|
||||
keys = json.load(f)
|
||||
public_key_pem = None
|
||||
for key_id, key_data in keys.items():
|
||||
public_key_pem = key_data.get('public_key_pem')
|
||||
break
|
||||
if public_key_pem:
|
||||
content = f"{hostname}:{local_address}:{p2p_port}:{public_key_pem}"
|
||||
user_id = hashlib.sha256(content.encode()).hexdigest()
|
||||
else:
|
||||
error("No public key found in keystore")
|
||||
raise click.Abort()
|
||||
else:
|
||||
error(f"Keystore not found at {keystore_path}")
|
||||
raise click.Abort()
|
||||
|
||||
pair = f"AIT/{quote_currency}"
|
||||
|
||||
# Generate order ID
|
||||
order_id = f"exchange_buy_{datetime.now().strftime('%Y%m%d%H%M%S')}_{hashlib.sha256(f'{user_id}{ait_amount}{quote_currency}'.encode()).hexdigest()[:8]}"
|
||||
|
||||
# Create buy order transaction
|
||||
buy_order_data = {
|
||||
'type': 'exchange',
|
||||
'action': 'buy',
|
||||
'order_id': order_id,
|
||||
'user_id': user_id,
|
||||
'pair': pair,
|
||||
'side': 'buy',
|
||||
'amount': float(ait_amount),
|
||||
'max_price': float(max_price) if max_price else None,
|
||||
'status': 'open',
|
||||
'island_id': island_id,
|
||||
'chain_id': chain_id,
|
||||
'created_at': datetime.now().isoformat()
|
||||
}
|
||||
|
||||
# Submit transaction to blockchain
|
||||
try:
|
||||
import httpx
|
||||
with httpx.Client() as client:
|
||||
response = client.post(
|
||||
f"{rpc_endpoint}/transaction",
|
||||
json=buy_order_data,
|
||||
timeout=10
|
||||
)
|
||||
|
||||
if response.status_code == 200:
|
||||
result = response.json()
|
||||
success(f"Buy order created successfully!")
|
||||
success(f"Order ID: {order_id}")
|
||||
success(f"Buying {ait_amount} AIT with {quote_currency}")
|
||||
|
||||
if max_price:
|
||||
success(f"Max price: {max_price:.8f} {quote_currency}/AIT")
|
||||
|
||||
order_info = {
|
||||
"Order ID": order_id,
|
||||
"Pair": pair,
|
||||
"Side": "BUY",
|
||||
"Amount": f"{ait_amount} AIT",
|
||||
"Max Price": f"{max_price:.8f} {quote_currency}/AIT" if max_price else "Market",
|
||||
"Status": "open",
|
||||
"User": user_id[:16] + "...",
|
||||
"Island": island_id[:16] + "..."
|
||||
}
|
||||
|
||||
output(order_info, ctx.obj.get('output_format', 'table'))
|
||||
else:
|
||||
error(f"Failed to submit transaction: {response.status_code}")
|
||||
if response.text:
|
||||
error(f"Error details: {response.text}")
|
||||
raise click.Abort()
|
||||
except Exception as e:
|
||||
error(f"Network error submitting transaction: {e}")
|
||||
raise click.Abort()
|
||||
|
||||
except Exception as e:
|
||||
error(f"Error creating buy order: {str(e)}")
|
||||
raise click.Abort()
|
||||
|
||||
|
||||
@exchange_island.command()
|
||||
@click.argument('ait_amount', type=float)
|
||||
@click.argument('quote_currency', type=click.Choice(['BTC', 'ETH']))
|
||||
@click.option('--min-price', type=float, help='Minimum price to accept per AIT')
|
||||
@click.pass_context
|
||||
def sell(ctx, ait_amount: float, quote_currency: str, min_price: Optional[float]):
|
||||
"""Sell AIT for BTC or ETH"""
|
||||
try:
|
||||
if ait_amount <= 0:
|
||||
error("AIT amount must be greater than 0")
|
||||
raise click.Abort()
|
||||
|
||||
# Load island credentials
|
||||
credentials = load_island_credentials()
|
||||
rpc_endpoint = get_rpc_endpoint()
|
||||
chain_id = get_chain_id()
|
||||
island_id = get_island_id()
|
||||
|
||||
# Get user node ID
|
||||
hostname = socket.gethostname()
|
||||
local_address = socket.gethostbyname(hostname)
|
||||
p2p_port = credentials.get('credentials', {}).get('p2p_port', 8001)
|
||||
|
||||
# Get public key for node ID generation
|
||||
keystore_path = '/var/lib/aitbc/keystore/validator_keys.json'
|
||||
if os.path.exists(keystore_path):
|
||||
with open(keystore_path, 'r') as f:
|
||||
keys = json.load(f)
|
||||
public_key_pem = None
|
||||
for key_id, key_data in keys.items():
|
||||
public_key_pem = key_data.get('public_key_pem')
|
||||
break
|
||||
if public_key_pem:
|
||||
content = f"{hostname}:{local_address}:{p2p_port}:{public_key_pem}"
|
||||
user_id = hashlib.sha256(content.encode()).hexdigest()
|
||||
else:
|
||||
error("No public key found in keystore")
|
||||
raise click.Abort()
|
||||
else:
|
||||
error(f"Keystore not found at {keystore_path}")
|
||||
raise click.Abort()
|
||||
|
||||
pair = f"AIT/{quote_currency}"
|
||||
|
||||
# Generate order ID
|
||||
order_id = f"exchange_sell_{datetime.now().strftime('%Y%m%d%H%M%S')}_{hashlib.sha256(f'{user_id}{ait_amount}{quote_currency}'.encode()).hexdigest()[:8]}"
|
||||
|
||||
# Create sell order transaction
|
||||
sell_order_data = {
|
||||
'type': 'exchange',
|
||||
'action': 'sell',
|
||||
'order_id': order_id,
|
||||
'user_id': user_id,
|
||||
'pair': pair,
|
||||
'side': 'sell',
|
||||
'amount': float(ait_amount),
|
||||
'min_price': float(min_price) if min_price else None,
|
||||
'status': 'open',
|
||||
'island_id': island_id,
|
||||
'chain_id': chain_id,
|
||||
'created_at': datetime.now().isoformat()
|
||||
}
|
||||
|
||||
# Submit transaction to blockchain
|
||||
try:
|
||||
import httpx
|
||||
with httpx.Client() as client:
|
||||
response = client.post(
|
||||
f"{rpc_endpoint}/transaction",
|
||||
json=sell_order_data,
|
||||
timeout=10
|
||||
)
|
||||
|
||||
if response.status_code == 200:
|
||||
result = response.json()
|
||||
success(f"Sell order created successfully!")
|
||||
success(f"Order ID: {order_id}")
|
||||
success(f"Selling {ait_amount} AIT for {quote_currency}")
|
||||
|
||||
if min_price:
|
||||
success(f"Min price: {min_price:.8f} {quote_currency}/AIT")
|
||||
|
||||
order_info = {
|
||||
"Order ID": order_id,
|
||||
"Pair": pair,
|
||||
"Side": "SELL",
|
||||
"Amount": f"{ait_amount} AIT",
|
||||
"Min Price": f"{min_price:.8f} {quote_currency}/AIT" if min_price else "Market",
|
||||
"Status": "open",
|
||||
"User": user_id[:16] + "...",
|
||||
"Island": island_id[:16] + "..."
|
||||
}
|
||||
|
||||
output(order_info, ctx.obj.get('output_format', 'table'))
|
||||
else:
|
||||
error(f"Failed to submit transaction: {response.status_code}")
|
||||
if response.text:
|
||||
error(f"Error details: {response.text}")
|
||||
raise click.Abort()
|
||||
except Exception as e:
|
||||
error(f"Network error submitting transaction: {e}")
|
||||
raise click.Abort()
|
||||
|
||||
except Exception as e:
|
||||
error(f"Error creating sell order: {str(e)}")
|
||||
raise click.Abort()
|
||||
|
||||
|
||||
@exchange_island.command()
@click.argument('pair', type=click.Choice(SUPPORTED_PAIRS))
@click.option('--limit', type=int, default=20, help='Order book depth')
@click.pass_context
def orderbook(ctx, pair: str, limit: int):
    """View the order book for a trading pair"""
    try:
        # Island context: credentials, RPC endpoint, island identity.
        credentials = load_island_credentials()
        rpc_endpoint = get_rpc_endpoint()
        island_id = get_island_id()

        try:
            import httpx
            # One request fetches both sides of the book, hence limit * 2.
            params = {
                'transaction_type': 'exchange',
                'island_id': island_id,
                'pair': pair,
                'status': 'open',
                'limit': limit * 2  # Get both buys and sells
            }

            with httpx.Client() as client:
                response = client.get(
                    f"{rpc_endpoint}/transactions",
                    params=params,
                    timeout=10
                )

            if response.status_code != 200:
                error(f"Failed to query blockchain: {response.status_code}")
                raise click.Abort()

            open_orders = response.json()

            # Split into sides; orders with an unknown side are ignored.
            buy_orders = [o for o in open_orders if o.get('side') == 'buy']
            sell_orders = [o for o in open_orders if o.get('side') == 'sell']

            # Bids: highest limit price first. Asks: lowest limit price first.
            buy_orders.sort(key=lambda o: o.get('max_price', 0), reverse=True)
            sell_orders.sort(key=lambda o: o.get('min_price', float('inf')))

            if not buy_orders and not sell_orders:
                info(f"No open orders for {pair}")
                return

            quote_ccy = pair.split('/')[1]

            # Sell side (asks).
            if sell_orders:
                asks_data = [
                    {
                        "Price": f"{o.get('min_price', 0):.8f}",
                        "Amount": f"{o.get('amount', 0):.4f} AIT",
                        "Total": f"{o.get('min_price', 0) * o.get('amount', 0):.8f} {quote_ccy}",
                        "User": o.get('user_id', '')[:16] + "...",
                        "Order": o.get('order_id', '')[:16] + "..."
                    }
                    for o in sell_orders[:limit]
                ]
                output(asks_data, ctx.obj.get('output_format', 'table'), title=f"Sell Orders (Asks) - {pair}")

            # Buy side (bids).
            if buy_orders:
                bids_data = [
                    {
                        "Price": f"{o.get('max_price', 0):.8f}",
                        "Amount": f"{o.get('amount', 0):.4f} AIT",
                        "Total": f"{o.get('max_price', 0) * o.get('amount', 0):.8f} {quote_ccy}",
                        "User": o.get('user_id', '')[:16] + "...",
                        "Order": o.get('order_id', '')[:16] + "..."
                    }
                    for o in buy_orders[:limit]
                ]
                output(bids_data, ctx.obj.get('output_format', 'table'), title=f"Buy Orders (Bids) - {pair}")

            # Spread: best ask minus best bid, only when both sides exist.
            if sell_orders and buy_orders:
                best_ask = sell_orders[0].get('min_price', 0)
                best_bid = buy_orders[0].get('max_price', 0)
                spread = best_ask - best_bid
                if best_bid > 0:
                    spread_pct = (spread / best_bid) * 100
                    info(f"Spread: {spread:.8f} ({spread_pct:.4f}%)")
                    info(f"Best Bid: {best_bid:.8f} {quote_ccy}/AIT")
                    info(f"Best Ask: {best_ask:.8f} {quote_ccy}/AIT")

        except Exception as e:
            error(f"Network error querying blockchain: {e}")
            raise click.Abort()

    except Exception as e:
        error(f"Error viewing order book: {str(e)}")
        raise click.Abort()
|
||||
|
||||
|
||||
@exchange_island.command()
@click.pass_context
def rates(ctx):
    """View current exchange rates for AIT/BTC and AIT/ETH"""
    try:
        # Island context: credentials, RPC endpoint, island identity.
        credentials = load_island_credentials()
        rpc_endpoint = get_rpc_endpoint()
        island_id = get_island_id()

        try:
            import httpx
            rates_data = []

            # FIX: reuse one HTTP client for all pairs; the previous version
            # opened a fresh connection for every pair in the loop.
            with httpx.Client() as client:
                for pair in SUPPORTED_PAIRS:
                    params = {
                        'transaction_type': 'exchange',
                        'island_id': island_id,
                        'pair': pair,
                        'status': 'open',
                        'limit': 100
                    }

                    response = client.get(
                        f"{rpc_endpoint}/transactions",
                        params=params,
                        timeout=10
                    )

                    if response.status_code == 200:
                        orders = response.json()

                        buy_orders = [o for o in orders if o.get('side') == 'buy']
                        sell_orders = [o for o in orders if o.get('side') == 'sell']

                        # Best bid = highest buy limit; best ask = lowest sell limit.
                        best_bid = max((o.get('max_price', 0) for o in buy_orders), default=0)
                        best_ask = min((o.get('min_price', float('inf')) for o in sell_orders), default=0)

                        # FIX: only compute a mid price when BOTH sides have a
                        # usable price. Previously an empty sell side yielded
                        # best_ask == 0 and a bogus mid price of best_bid / 2,
                        # and a "Best Ask" of 0.00000000 instead of N/A.
                        if best_bid > 0 and 0 < best_ask < float('inf'):
                            mid_price = (best_bid + best_ask) / 2
                        else:
                            mid_price = 0

                        rates_data.append({
                            "Pair": pair,
                            "Best Bid": f"{best_bid:.8f}" if best_bid > 0 else "N/A",
                            "Best Ask": f"{best_ask:.8f}" if 0 < best_ask < float('inf') else "N/A",
                            "Mid Price": f"{mid_price:.8f}" if mid_price > 0 else "N/A",
                            "Buy Orders": len(buy_orders),
                            "Sell Orders": len(sell_orders)
                        })
                    else:
                        # Keep one row per pair even on failure so the table
                        # shape is stable.
                        rates_data.append({
                            "Pair": pair,
                            "Best Bid": "Error",
                            "Best Ask": "Error",
                            "Mid Price": "Error",
                            "Buy Orders": 0,
                            "Sell Orders": 0
                        })

            output(rates_data, ctx.obj.get('output_format', 'table'), title="Exchange Rates")

        except Exception as e:
            error(f"Network error querying blockchain: {e}")
            raise click.Abort()

    except Exception as e:
        error(f"Error viewing exchange rates: {str(e)}")
        raise click.Abort()
|
||||
|
||||
|
||||
@exchange_island.command()
@click.option('--user', help='Filter by user ID')
@click.option('--status', help='Filter by status (open, filled, partially_filled, cancelled)')
@click.option('--pair', type=click.Choice(SUPPORTED_PAIRS), help='Filter by trading pair')
@click.pass_context
def orders(ctx, user: Optional[str], status: Optional[str], pair: Optional[str]):
    """List exchange orders"""
    try:
        # Island context: credentials, RPC endpoint, island identity.
        credentials = load_island_credentials()
        rpc_endpoint = get_rpc_endpoint()
        island_id = get_island_id()

        try:
            import httpx
            # Base query; optional filters are added only when supplied.
            params = {
                'transaction_type': 'exchange',
                'island_id': island_id
            }
            if user:
                params['user_id'] = user
            if status:
                params['status'] = status
            if pair:
                params['pair'] = pair

            with httpx.Client() as client:
                response = client.get(
                    f"{rpc_endpoint}/transactions",
                    params=params,
                    timeout=10
                )

            if response.status_code != 200:
                error(f"Failed to query blockchain: {response.status_code}")
                raise click.Abort()

            found_orders = response.json()
            if not found_orders:
                info("No exchange orders found")
                return

            # Render one summary row per order.
            orders_data = []
            for order in found_orders:
                has_limit = order.get('max_price') or order.get('min_price')
                orders_data.append({
                    "Order ID": order.get('order_id', '')[:20] + "...",
                    "Pair": order.get('pair'),
                    "Side": order.get('side', '').upper(),
                    "Amount": f"{order.get('amount', 0):.4f} AIT",
                    "Price": f"{order.get('max_price', order.get('min_price', 0)):.8f}" if has_limit else "Market",
                    "Status": order.get('status'),
                    "User": order.get('user_id', '')[:16] + "...",
                    "Created": order.get('created_at', '')[:19]
                })

            output(orders_data, ctx.obj.get('output_format', 'table'), title=f"Exchange Orders ({island_id[:16]}...)")

        except Exception as e:
            error(f"Network error querying blockchain: {e}")
            raise click.Abort()

    except Exception as e:
        error(f"Error listing orders: {str(e)}")
        raise click.Abort()
|
||||
|
||||
|
||||
@exchange_island.command()
@click.argument('order_id')
@click.pass_context
def cancel(ctx, order_id: str):
    """Cancel an exchange order"""
    try:
        # Island context: credentials, RPC endpoint, chain and island identity.
        credentials = load_island_credentials()
        rpc_endpoint = get_rpc_endpoint()
        chain_id = get_chain_id()
        island_id = get_island_id()

        # Local node identity: sha256 over host, address, p2p port and the
        # first public key found in the validator keystore (same derivation
        # used when orders are created).
        hostname = socket.gethostname()
        local_address = socket.gethostbyname(hostname)
        p2p_port = credentials.get('credentials', {}).get('p2p_port', 8001)

        keystore_path = '/var/lib/aitbc/keystore/validator_keys.json'
        # FIX: previously a missing keystore or missing public key fell
        # through with local_node_id never assigned, producing a NameError
        # swallowed by the outer handler. Abort explicitly instead, matching
        # the behavior of the offer/bid commands.
        if not os.path.exists(keystore_path):
            error(f"Keystore not found at {keystore_path}")
            raise click.Abort()
        with open(keystore_path, 'r') as f:
            keys = json.load(f)
        # Use the first key in the keystore, as the rest of the CLI does.
        public_key_pem = next(
            (key_data.get('public_key_pem') for key_data in keys.values()), None
        )
        if not public_key_pem:
            error("No public key found in keystore")
            raise click.Abort()
        content = f"{hostname}:{local_address}:{p2p_port}:{public_key_pem}"
        local_node_id = hashlib.sha256(content.encode()).hexdigest()

        # Cancel transaction payload.
        cancel_data = {
            'type': 'exchange',
            'action': 'cancel',
            'order_id': order_id,
            'user_id': local_node_id,
            'status': 'cancelled',
            'cancelled_at': datetime.now().isoformat(),
            'island_id': island_id,
            'chain_id': chain_id
        }

        # Submit transaction to blockchain.
        try:
            import httpx
            with httpx.Client() as client:
                response = client.post(
                    f"{rpc_endpoint}/transaction",
                    json=cancel_data,
                    timeout=10
                )

            if response.status_code == 200:
                success(f"Order {order_id} cancelled successfully!")
            else:
                error(f"Failed to cancel order: {response.status_code}")
                if response.text:
                    error(f"Error details: {response.text}")
                raise click.Abort()
        except Exception as e:
            error(f"Network error submitting transaction: {e}")
            raise click.Abort()

    except Exception as e:
        error(f"Error cancelling order: {str(e)}")
        raise click.Abort()
|
||||
716
cli/aitbc_cli/commands/gpu_marketplace.py
Normal file
716
cli/aitbc_cli/commands/gpu_marketplace.py
Normal file
@@ -0,0 +1,716 @@
|
||||
"""
|
||||
GPU Marketplace CLI Commands
|
||||
Commands for bidding on and offering GPU power in the AITBC island marketplace
|
||||
"""
|
||||
|
||||
import click
|
||||
import json
|
||||
import hashlib
|
||||
import socket
|
||||
import os
|
||||
import asyncio
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
from typing import Optional, List
|
||||
from ..utils import output, error, success, info, warning
|
||||
from ..utils.island_credentials import (
|
||||
load_island_credentials, get_rpc_endpoint, get_chain_id,
|
||||
get_island_id, get_island_name
|
||||
)
|
||||
|
||||
|
||||
@click.group()
def gpu():
    """GPU marketplace commands for bidding and offering GPU power"""
    # Group entry point only; subcommands carry the behavior.
    pass
|
||||
|
||||
|
||||
@gpu.command()
@click.argument('gpu_count', type=int)
@click.argument('price_per_gpu', type=float)
@click.argument('duration_hours', type=int)
@click.option('--specs', help='GPU specifications (JSON string)')
@click.option('--description', help='Description of the GPU offer')
@click.pass_context
def offer(ctx, gpu_count: int, price_per_gpu: float, duration_hours: int, specs: Optional[str], description: Optional[str]):
    """Offer GPU power for sale in the marketplace"""
    try:
        # Island context: credentials, RPC endpoint, chain and island identity.
        credentials = load_island_credentials()
        rpc_endpoint = get_rpc_endpoint()
        chain_id = get_chain_id()
        island_id = get_island_id()

        # Provider identity: sha256 over host, address, p2p port and the
        # first public key in the validator keystore.
        hostname = socket.gethostname()
        local_address = socket.gethostbyname(hostname)
        p2p_port = credentials.get('credentials', {}).get('p2p_port', 8001)

        keystore_path = '/var/lib/aitbc/keystore/validator_keys.json'
        if not os.path.exists(keystore_path):
            error(f"Keystore not found at {keystore_path}")
            raise click.Abort()
        with open(keystore_path, 'r') as f:
            keys = json.load(f)
        public_key_pem = next(
            (key_data.get('public_key_pem') for key_data in keys.values()), None
        )
        if not public_key_pem:
            error("No public key found in keystore")
            raise click.Abort()
        content = f"{hostname}:{local_address}:{p2p_port}:{public_key_pem}"
        provider_node_id = hashlib.sha256(content.encode()).hexdigest()

        # Total cost of the whole offer over its full duration.
        total_price = price_per_gpu * gpu_count * duration_hours

        # Offer id: timestamp plus a short digest of the offer parameters.
        offer_id = (
            f"gpu_offer_{datetime.now().strftime('%Y%m%d%H%M%S')}_"
            f"{hashlib.sha256(f'{provider_node_id}{gpu_count}{price_per_gpu}'.encode()).hexdigest()[:8]}"
        )

        # Optional GPU specs supplied as a JSON string.
        gpu_specs = {}
        if specs:
            try:
                gpu_specs = json.loads(specs)
            except json.JSONDecodeError:
                error("Invalid JSON specifications")
                raise click.Abort()

        # Offer transaction payload.
        offer_data = {
            'type': 'gpu_marketplace',
            'action': 'offer',
            'offer_id': offer_id,
            'provider_node_id': provider_node_id,
            'gpu_count': gpu_count,
            'price_per_gpu': float(price_per_gpu),
            'duration_hours': duration_hours,
            'total_price': float(total_price),
            'status': 'active',
            'specs': gpu_specs,
            'description': description or f"{gpu_count} GPUs for {duration_hours} hours",
            'island_id': island_id,
            'chain_id': chain_id,
            'created_at': datetime.now().isoformat()
        }

        # Submit transaction to blockchain.
        try:
            import httpx
            with httpx.Client() as client:
                response = client.post(
                    f"{rpc_endpoint}/transaction",
                    json=offer_data,
                    timeout=10
                )

            if response.status_code == 200:
                result = response.json()
                success(f"GPU offer created successfully!")
                success(f"Offer ID: {offer_id}")
                success(f"Total Price: {total_price:.2f} AIT")

                offer_info = {
                    "Offer ID": offer_id,
                    "GPU Count": gpu_count,
                    "Price per GPU": f"{price_per_gpu:.4f} AIT/hour",
                    "Duration": f"{duration_hours} hours",
                    "Total Price": f"{total_price:.2f} AIT",
                    "Status": "active",
                    "Provider Node": provider_node_id[:16] + "...",
                    "Island": island_id[:16] + "..."
                }

                output(offer_info, ctx.obj.get('output_format', 'table'))
            else:
                error(f"Failed to submit transaction: {response.status_code}")
                if response.text:
                    error(f"Error details: {response.text}")
                raise click.Abort()
        except Exception as e:
            error(f"Network error submitting transaction: {e}")
            raise click.Abort()

    except Exception as e:
        error(f"Error creating GPU offer: {str(e)}")
        raise click.Abort()
|
||||
|
||||
|
||||
@gpu.command()
@click.argument('gpu_count', type=int)
@click.argument('max_price', type=float)
@click.argument('duration_hours', type=int)
@click.option('--specs', help='Required GPU specifications (JSON string)')
@click.pass_context
def bid(ctx, gpu_count: int, max_price: float, duration_hours: int, specs: Optional[str]):
    """Bid on GPU power in the marketplace"""
    try:
        # Island context: credentials, RPC endpoint, chain and island identity.
        credentials = load_island_credentials()
        rpc_endpoint = get_rpc_endpoint()
        chain_id = get_chain_id()
        island_id = get_island_id()

        # Bidder identity: sha256 over host, address, p2p port and the first
        # public key in the validator keystore.
        hostname = socket.gethostname()
        local_address = socket.gethostbyname(hostname)
        p2p_port = credentials.get('credentials', {}).get('p2p_port', 8001)

        keystore_path = '/var/lib/aitbc/keystore/validator_keys.json'
        if not os.path.exists(keystore_path):
            error(f"Keystore not found at {keystore_path}")
            raise click.Abort()
        with open(keystore_path, 'r') as f:
            keys = json.load(f)
        public_key_pem = next(
            (key_data.get('public_key_pem') for key_data in keys.values()), None
        )
        if not public_key_pem:
            error("No public key found in keystore")
            raise click.Abort()
        content = f"{hostname}:{local_address}:{p2p_port}:{public_key_pem}"
        bidder_node_id = hashlib.sha256(content.encode()).hexdigest()

        # Spend cap for the whole bid over its full duration.
        max_total_price = max_price * gpu_count * duration_hours

        # Bid id: timestamp plus a short digest of the bid parameters.
        bid_id = (
            f"gpu_bid_{datetime.now().strftime('%Y%m%d%H%M%S')}_"
            f"{hashlib.sha256(f'{bidder_node_id}{gpu_count}{max_price}'.encode()).hexdigest()[:8]}"
        )

        # Optional required GPU specs supplied as a JSON string.
        gpu_specs = {}
        if specs:
            try:
                gpu_specs = json.loads(specs)
            except json.JSONDecodeError:
                error("Invalid JSON specifications")
                raise click.Abort()

        # Bid transaction payload.
        bid_data = {
            'type': 'gpu_marketplace',
            'action': 'bid',
            'bid_id': bid_id,
            'bidder_node_id': bidder_node_id,
            'gpu_count': gpu_count,
            'max_price_per_gpu': float(max_price),
            'duration_hours': duration_hours,
            'max_total_price': float(max_total_price),
            'status': 'pending',
            'specs': gpu_specs,
            'island_id': island_id,
            'chain_id': chain_id,
            'created_at': datetime.now().isoformat()
        }

        # Submit transaction to blockchain.
        try:
            import httpx
            with httpx.Client() as client:
                # FIX: was POSTing to "/v1/transactions", unlike every other
                # command in this module which submits to "/transaction".
                response = client.post(
                    f"{rpc_endpoint}/transaction",
                    json=bid_data,
                    timeout=10
                )

            if response.status_code == 200:
                result = response.json()
                success(f"GPU bid created successfully!")
                success(f"Bid ID: {bid_id}")
                success(f"Max Total Price: {max_total_price:.2f} AIT")

                bid_info = {
                    "Bid ID": bid_id,
                    "GPU Count": gpu_count,
                    "Max Price per GPU": f"{max_price:.4f} AIT/hour",
                    "Duration": f"{duration_hours} hours",
                    "Max Total Price": f"{max_total_price:.2f} AIT",
                    "Status": "pending",
                    "Bidder Node": bidder_node_id[:16] + "...",
                    "Island": island_id[:16] + "..."
                }

                output(bid_info, ctx.obj.get('output_format', 'table'))
            else:
                error(f"Failed to submit transaction: {response.status_code}")
                if response.text:
                    error(f"Error details: {response.text}")
                raise click.Abort()
        except Exception as e:
            error(f"Network error submitting transaction: {e}")
            raise click.Abort()

    except Exception as e:
        error(f"Error creating GPU bid: {str(e)}")
        raise click.Abort()
|
||||
|
||||
|
||||
@gpu.command()
@click.option('--provider', help='Filter by provider node ID')
@click.option('--status', help='Filter by status (active, pending, accepted, completed, cancelled)')
@click.option('--type', type=click.Choice(['offer', 'bid', 'all']), default='all', help='Filter by type')
@click.pass_context
def list(ctx, provider: Optional[str], status: Optional[str], type: str):
    """List GPU marketplace offers and bids"""
    try:
        # Island context: credentials, RPC endpoint, island identity.
        credentials = load_island_credentials()
        rpc_endpoint = get_rpc_endpoint()
        island_id = get_island_id()

        try:
            import httpx
            # Base query; optional filters are added only when supplied.
            params = {
                'transaction_type': 'gpu_marketplace',
                'island_id': island_id
            }
            if provider:
                params['provider_node_id'] = provider
            if status:
                params['status'] = status
            if type != 'all':
                params['action'] = type

            with httpx.Client() as client:
                response = client.get(
                    f"{rpc_endpoint}/transactions",
                    params=params,
                    timeout=10
                )

            if response.status_code != 200:
                error(f"Failed to query blockchain: {response.status_code}")
                raise click.Abort()

            transactions = response.json()
            if not transactions:
                info("No GPU marketplace transactions found")
                return

            # One row per offer/bid; other action types are skipped.
            market_data = []
            for tx in transactions:
                action = tx.get('action')
                if action == 'offer':
                    market_data.append({
                        "ID": tx.get('offer_id', tx.get('transaction_id', 'N/A'))[:20] + "...",
                        "Type": "OFFER",
                        "GPU Count": tx.get('gpu_count'),
                        "Price": f"{tx.get('price_per_gpu', 0):.4f} AIT/h",
                        "Duration": f"{tx.get('duration_hours')}h",
                        "Total": f"{tx.get('total_price', 0):.2f} AIT",
                        "Status": tx.get('status'),
                        "Provider": tx.get('provider_node_id', '')[:16] + "...",
                        "Created": tx.get('created_at', '')[:19]
                    })
                elif action == 'bid':
                    market_data.append({
                        "ID": tx.get('bid_id', tx.get('transaction_id', 'N/A'))[:20] + "...",
                        "Type": "BID",
                        "GPU Count": tx.get('gpu_count'),
                        "Max Price": f"{tx.get('max_price_per_gpu', 0):.4f} AIT/h",
                        "Duration": f"{tx.get('duration_hours')}h",
                        "Max Total": f"{tx.get('max_total_price', 0):.2f} AIT",
                        "Status": tx.get('status'),
                        "Bidder": tx.get('bidder_node_id', '')[:16] + "...",
                        "Created": tx.get('created_at', '')[:19]
                    })

            output(market_data, ctx.obj.get('output_format', 'table'), title=f"GPU Marketplace ({island_id[:16]}...)")

        except Exception as e:
            error(f"Network error querying blockchain: {e}")
            raise click.Abort()

    except Exception as e:
        error(f"Error listing GPU marketplace: {str(e)}")
        raise click.Abort()
|
||||
|
||||
|
||||
@gpu.command()
@click.argument('order_id')
@click.pass_context
def cancel(ctx, order_id: str):
    """Cancel a GPU offer or bid"""
    try:
        # Island context: credentials, RPC endpoint, chain and island identity.
        credentials = load_island_credentials()
        rpc_endpoint = get_rpc_endpoint()
        chain_id = get_chain_id()
        island_id = get_island_id()

        # Local node identity: sha256 over host, address, p2p port and the
        # first public key found in the validator keystore.
        hostname = socket.gethostname()
        local_address = socket.gethostbyname(hostname)
        p2p_port = credentials.get('credentials', {}).get('p2p_port', 8001)

        keystore_path = '/var/lib/aitbc/keystore/validator_keys.json'
        # FIX: previously a missing keystore or missing public key fell
        # through with local_node_id never assigned, producing a NameError
        # swallowed by the outer handler. Abort explicitly instead, matching
        # the offer/bid commands.
        if not os.path.exists(keystore_path):
            error(f"Keystore not found at {keystore_path}")
            raise click.Abort()
        with open(keystore_path, 'r') as f:
            keys = json.load(f)
        public_key_pem = next(
            (key_data.get('public_key_pem') for key_data in keys.values()), None
        )
        if not public_key_pem:
            error("No public key found in keystore")
            raise click.Abort()
        content = f"{hostname}:{local_address}:{p2p_port}:{public_key_pem}"
        local_node_id = hashlib.sha256(content.encode()).hexdigest()

        # The order id prefix tells us whether this cancels an offer or a bid.
        if order_id.startswith('gpu_offer'):
            action = 'cancel_offer'
            node_id_field = 'provider_node_id'
        elif order_id.startswith('gpu_bid'):
            action = 'cancel_bid'
            node_id_field = 'bidder_node_id'
        else:
            error("Invalid order ID format. Must start with 'gpu_offer' or 'gpu_bid'")
            raise click.Abort()

        # Cancel transaction payload.
        cancel_data = {
            'type': 'gpu_marketplace',
            'action': action,
            'order_id': order_id,
            'node_id': local_node_id,
            'status': 'cancelled',
            'cancelled_at': datetime.now().isoformat(),
            'island_id': island_id,
            'chain_id': chain_id
        }

        # Submit transaction to blockchain.
        try:
            import httpx
            with httpx.Client() as client:
                response = client.post(
                    f"{rpc_endpoint}/transaction",
                    json=cancel_data,
                    timeout=10
                )

            if response.status_code == 200:
                success(f"Order {order_id} cancelled successfully!")
            else:
                error(f"Failed to cancel order: {response.status_code}")
                if response.text:
                    error(f"Error details: {response.text}")
                raise click.Abort()
        except Exception as e:
            error(f"Network error submitting transaction: {e}")
            raise click.Abort()

    except Exception as e:
        error(f"Error cancelling order: {str(e)}")
        raise click.Abort()
|
||||
|
||||
|
||||
@gpu.command()
@click.argument('bid_id')
@click.pass_context
def accept(ctx, bid_id: str):
    """Accept a GPU bid (provider only)"""
    try:
        # Island context: credentials, RPC endpoint, chain and island identity.
        credentials = load_island_credentials()
        rpc_endpoint = get_rpc_endpoint()
        chain_id = get_chain_id()
        island_id = get_island_id()

        # Provider identity: sha256 over host, address, p2p port and the
        # first public key in the validator keystore.
        hostname = socket.gethostname()
        local_address = socket.gethostbyname(hostname)
        p2p_port = credentials.get('credentials', {}).get('p2p_port', 8001)

        keystore_path = '/var/lib/aitbc/keystore/validator_keys.json'
        if not os.path.exists(keystore_path):
            error(f"Keystore not found at {keystore_path}")
            raise click.Abort()
        with open(keystore_path, 'r') as f:
            keys = json.load(f)
        public_key_pem = next(
            (key_data.get('public_key_pem') for key_data in keys.values()), None
        )
        if not public_key_pem:
            error("No public key found in keystore")
            raise click.Abort()
        content = f"{hostname}:{local_address}:{p2p_port}:{public_key_pem}"
        provider_node_id = hashlib.sha256(content.encode()).hexdigest()

        # Accept transaction payload.
        accept_data = {
            'type': 'gpu_marketplace',
            'action': 'accept',
            'bid_id': bid_id,
            'provider_node_id': provider_node_id,
            'status': 'accepted',
            'accepted_at': datetime.now().isoformat(),
            'island_id': island_id,
            'chain_id': chain_id
        }

        # Submit transaction to blockchain.
        try:
            import httpx
            with httpx.Client() as client:
                response = client.post(
                    f"{rpc_endpoint}/transaction",
                    json=accept_data,
                    timeout=10
                )

            if response.status_code == 200:
                success(f"Bid {bid_id} accepted successfully!")
            else:
                error(f"Failed to accept bid: {response.status_code}")
                if response.text:
                    error(f"Error details: {response.text}")
                raise click.Abort()
        except Exception as e:
            error(f"Network error submitting transaction: {e}")
            raise click.Abort()

    except Exception as e:
        error(f"Error accepting bid: {str(e)}")
        raise click.Abort()
|
||||
|
||||
|
||||
@gpu.command()
@click.argument('order_id')
@click.pass_context
def status(ctx, order_id: str):
    """Check the status of a GPU order"""
    try:
        # Island context: credentials, RPC endpoint, island identity.
        credentials = load_island_credentials()
        rpc_endpoint = get_rpc_endpoint()
        island_id = get_island_id()

        try:
            import httpx
            params = {
                'transaction_type': 'gpu_marketplace',
                'island_id': island_id,
                'order_id': order_id
            }

            with httpx.Client() as client:
                response = client.get(
                    f"{rpc_endpoint}/transactions",
                    params=params,
                    timeout=10
                )

            if response.status_code != 200:
                error(f"Failed to query blockchain: {response.status_code}")
                raise click.Abort()

            transactions = response.json()
            if not transactions:
                error(f"Order {order_id} not found")
                raise click.Abort()

            # Most recent (first) transaction for this order id.
            tx = transactions[0]
            action = tx.get('action')

            order_info = {
                "Order ID": order_id,
                # FIX: 'action' may be absent from the transaction; previously
                # None.upper() raised AttributeError, surfaced as a generic
                # "Network error" message.
                "Type": (action or 'unknown').upper(),
                "Status": tx.get('status'),
                "Created": tx.get('created_at'),
            }

            if action == 'offer':
                order_info.update({
                    "GPU Count": tx.get('gpu_count'),
                    "Price per GPU": f"{tx.get('price_per_gpu', 0):.4f} AIT/h",
                    "Duration": f"{tx.get('duration_hours')}h",
                    "Total Price": f"{tx.get('total_price', 0):.2f} AIT",
                    "Provider": tx.get('provider_node_id', '')[:16] + "..."
                })
            elif action == 'bid':
                order_info.update({
                    "GPU Count": tx.get('gpu_count'),
                    "Max Price": f"{tx.get('max_price_per_gpu', 0):.4f} AIT/h",
                    "Duration": f"{tx.get('duration_hours')}h",
                    "Max Total": f"{tx.get('max_total_price', 0):.2f} AIT",
                    "Bidder": tx.get('bidder_node_id', '')[:16] + "..."
                })

            if 'accepted_at' in tx:
                order_info["Accepted"] = tx['accepted_at']
            if 'cancelled_at' in tx:
                order_info["Cancelled"] = tx['cancelled_at']

            output(order_info, ctx.obj.get('output_format', 'table'), title=f"Order Status: {order_id}")

        except Exception as e:
            error(f"Network error querying blockchain: {e}")
            raise click.Abort()

    except Exception as e:
        error(f"Error checking order status: {str(e)}")
        raise click.Abort()
|
||||
|
||||
|
||||
@gpu.command()
@click.pass_context
def match(ctx):
    """Match GPU bids with offers (price discovery)"""
    try:
        # Island context: credentials, RPC endpoint, island identity.
        credentials = load_island_credentials()
        rpc_endpoint = get_rpc_endpoint()
        island_id = get_island_id()

        try:
            import httpx
            params = {
                'transaction_type': 'gpu_marketplace',
                'island_id': island_id,
                'status': 'active'
            }

            # Keep the client open for the whole matching pass: it is used
            # both to fetch the book and to submit match transactions.
            with httpx.Client() as client:
                response = client.get(
                    f"{rpc_endpoint}/transactions",
                    params=params,
                    timeout=10
                )

                if response.status_code != 200:
                    error(f"Failed to query blockchain: {response.status_code}")
                    raise click.Abort()

                transactions = response.json()

                offers = [tx for tx in transactions if tx.get('action') == 'offer']
                bids = [tx for tx in transactions if tx.get('action') == 'bid']

                if not offers or not bids:
                    info("No active offers or bids to match")
                    return

                # Cheapest offers first, highest bids first: best matches first.
                offers.sort(key=lambda x: x.get('price_per_gpu', float('inf')))
                bids.sort(key=lambda x: x.get('max_price_per_gpu', 0), reverse=True)

                matches = []
                # FIX: track which offers are already consumed. Previously the
                # inner loop neither removed a matched offer nor stopped after
                # a successful match, so a single bid could be matched against
                # several offers and one offer could be sold multiple times.
                available_offers = offers[:]
                for bid_tx in bids:
                    for offer_tx in available_offers:
                        # A match requires the bid price to cover the offer
                        # price with identical GPU count and duration.
                        if (bid_tx.get('max_price_per_gpu', 0) >= offer_tx.get('price_per_gpu', float('inf'))
                                and bid_tx.get('gpu_count') == offer_tx.get('gpu_count')
                                and bid_tx.get('duration_hours') == offer_tx.get('duration_hours')):
                            match_data = {
                                'type': 'gpu_marketplace',
                                'action': 'match',
                                'bid_id': bid_tx.get('bid_id'),
                                'offer_id': offer_tx.get('offer_id'),
                                'bidder_node_id': bid_tx.get('bidder_node_id'),
                                'provider_node_id': offer_tx.get('provider_node_id'),
                                'gpu_count': bid_tx.get('gpu_count'),
                                'matched_price': offer_tx.get('price_per_gpu'),
                                'duration_hours': bid_tx.get('duration_hours'),
                                'total_price': offer_tx.get('total_price'),
                                'status': 'matched',
                                'matched_at': datetime.now().isoformat(),
                                'island_id': island_id,
                                'chain_id': get_chain_id()
                            }

                            match_response = client.post(
                                f"{rpc_endpoint}/transaction",
                                json=match_data,
                                timeout=10
                            )

                            if match_response.status_code == 200:
                                matches.append({
                                    "Bid ID": bid_tx.get('bid_id')[:16] + "...",
                                    "Offer ID": offer_tx.get('offer_id')[:16] + "...",
                                    "GPU Count": bid_tx.get('gpu_count'),
                                    "Matched Price": f"{offer_tx.get('price_per_gpu', 0):.4f} AIT/h",
                                    "Total Price": f"{offer_tx.get('total_price', 0):.2f} AIT",
                                    "Duration": f"{bid_tx.get('duration_hours')}h"
                                })
                                # Consume the offer and move to the next bid.
                                available_offers.remove(offer_tx)
                                break

                if matches:
                    success(f"Matched {len(matches)} GPU orders!")
                    output(matches, ctx.obj.get('output_format', 'table'), title="GPU Order Matches")
                else:
                    info("No matching orders found")

        except Exception as e:
            error(f"Network error querying blockchain: {e}")
            raise click.Abort()

    except Exception as e:
        error(f"Error matching orders: {str(e)}")
        raise click.Abort()
|
||||
|
||||
|
||||
@gpu.command()
|
||||
@click.pass_context
|
||||
def providers(ctx):
|
||||
"""Query island members for GPU providers"""
|
||||
try:
|
||||
# Load island credentials
|
||||
credentials = load_island_credentials()
|
||||
island_id = get_island_id()
|
||||
|
||||
# Load island members from credentials
|
||||
members = credentials.get('members', [])
|
||||
|
||||
if not members:
|
||||
warning("No island members found in credentials")
|
||||
return
|
||||
|
||||
# Query each member for GPU availability via P2P
|
||||
info(f"Querying {len(members)} island members for GPU availability...")
|
||||
|
||||
# For now, display the members
|
||||
# In a full implementation, this would use P2P network to query each member
|
||||
provider_data = []
|
||||
for member in members:
|
||||
provider_data.append({
|
||||
"Node ID": member.get('node_id', '')[:16] + "...",
|
||||
"Address": member.get('address', 'N/A'),
|
||||
"Port": member.get('port', 'N/A'),
|
||||
"Is Hub": member.get('is_hub', False),
|
||||
"Public Address": member.get('public_address', 'N/A'),
|
||||
"Public Port": member.get('public_port', 'N/A')
|
||||
})
|
||||
|
||||
output(provider_data, ctx.obj.get('output_format', 'table'), title=f"Island Members ({island_id[:16]}...)")
|
||||
info("Note: GPU availability query via P2P network to be implemented")
|
||||
|
||||
except Exception as e:
|
||||
error(f"Error querying GPU providers: {str(e)}")
|
||||
raise click.Abort()
|
||||
@@ -1,7 +1,19 @@
|
||||
"""Node management commands for AITBC CLI"""
|
||||
"""
|
||||
Node management commands for AITBC
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import socket
|
||||
import json
|
||||
import hashlib
|
||||
import click
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
from datetime import datetime
|
||||
|
||||
from ..utils.output import output, success, error, warning, info
|
||||
from ..core.config import MultiChainConfig, load_multichain_config, get_default_node_config, add_node_config, remove_node_config
|
||||
from ..core.node_client import NodeClient
|
||||
from ..utils import output, error, success
|
||||
@@ -480,23 +492,107 @@ def create(ctx, island_id, island_name, chain_id):
|
||||
@click.argument('island_id')
|
||||
@click.argument('island_name')
|
||||
@click.argument('chain_id')
|
||||
@click.option('--hub', default='hub.aitbc.bubuit.net', help='Hub domain name to connect to')
|
||||
@click.option('--is-hub', is_flag=True, help='Register this node as a hub for the island')
|
||||
@click.pass_context
|
||||
def join(ctx, island_id, island_name, chain_id, is_hub):
|
||||
def join(ctx, island_id, island_name, chain_id, hub, is_hub):
|
||||
"""Join an existing island"""
|
||||
try:
|
||||
join_info = {
|
||||
"Island ID": island_id,
|
||||
"Island Name": island_name,
|
||||
"Chain ID": chain_id,
|
||||
"As Hub": is_hub
|
||||
}
|
||||
|
||||
output(join_info, ctx.obj.get('output_format', 'table'), title=f"Joining Island: {island_name}")
|
||||
success(f"Successfully joined island {island_name}")
|
||||
|
||||
# Note: In a real implementation, this would update the island manager
|
||||
|
||||
# Get system hostname
|
||||
hostname = socket.gethostname()
|
||||
|
||||
# Get public key from keystore
|
||||
keystore_path = '/var/lib/aitbc/keystore/validator_keys.json'
|
||||
public_key_pem = None
|
||||
|
||||
if os.path.exists(keystore_path):
|
||||
with open(keystore_path, 'r') as f:
|
||||
keys = json.load(f)
|
||||
# Get first key's public key
|
||||
for key_id, key_data in keys.items():
|
||||
public_key_pem = key_data.get('public_key_pem')
|
||||
break
|
||||
else:
|
||||
error(f"Keystore not found at {keystore_path}")
|
||||
raise click.Abort()
|
||||
|
||||
if not public_key_pem:
|
||||
error("No public key found in keystore")
|
||||
raise click.Abort()
|
||||
|
||||
# Generate node_id using hostname-based method
|
||||
local_address = socket.gethostbyname(hostname)
|
||||
local_port = 8001 # Default hub port
|
||||
content = f"{hostname}:{local_address}:{local_port}:{public_key_pem}"
|
||||
node_id = hashlib.sha256(content.encode()).hexdigest()
|
||||
|
||||
# Resolve hub domain to IP
|
||||
hub_ip = socket.gethostbyname(hub)
|
||||
hub_port = 8001 # Default hub port
|
||||
|
||||
info(f"Connecting to hub {hub} ({hub_ip}:{hub_port})...")
|
||||
|
||||
# Create P2P network service instance for sending join request
|
||||
sys.path.insert(0, '/opt/aitbc/apps/blockchain-node/src')
|
||||
from aitbc_chain.p2p_network import P2PNetworkService
|
||||
|
||||
# Create a minimal P2P service just for sending the join request
|
||||
p2p_service = P2PNetworkService(local_address, local_port, node_id, [])
|
||||
|
||||
# Send join request
|
||||
async def send_join():
|
||||
return await p2p_service.send_join_request(
|
||||
hub_ip, hub_port, island_id, island_name, node_id, public_key_pem
|
||||
)
|
||||
|
||||
response = asyncio.run(send_join())
|
||||
|
||||
if response:
|
||||
# Store credentials locally
|
||||
credentials_path = '/var/lib/aitbc/island_credentials.json'
|
||||
credentials_data = {
|
||||
"island_id": response.get('island_id'),
|
||||
"island_name": response.get('island_name'),
|
||||
"island_chain_id": response.get('island_chain_id'),
|
||||
"credentials": response.get('credentials'),
|
||||
"joined_at": datetime.now().isoformat()
|
||||
}
|
||||
|
||||
with open(credentials_path, 'w') as f:
|
||||
json.dump(credentials_data, f, indent=2)
|
||||
|
||||
# Display join info
|
||||
join_info = {
|
||||
"Island ID": response.get('island_id'),
|
||||
"Island Name": response.get('island_name'),
|
||||
"Chain ID": response.get('island_chain_id'),
|
||||
"Member Count": len(response.get('members', [])),
|
||||
"Credentials Stored": credentials_path
|
||||
}
|
||||
|
||||
output(join_info, ctx.obj.get('output_format', 'table'), title=f"Joined Island: {island_name}")
|
||||
|
||||
# Display member list
|
||||
members = response.get('members', [])
|
||||
if members:
|
||||
output(members, ctx.obj.get('output_format', 'table'), title="Island Members")
|
||||
|
||||
# Display credentials
|
||||
credentials = response.get('credentials', {})
|
||||
if credentials:
|
||||
output(credentials, ctx.obj.get('output_format', 'table'), title="Blockchain Credentials")
|
||||
|
||||
success(f"Successfully joined island {island_name}")
|
||||
|
||||
# If registering as hub
|
||||
if is_hub:
|
||||
info("Registering as hub...")
|
||||
# Hub registration would happen here via the hub register command
|
||||
info("Run 'aitbc node hub register' to complete hub registration")
|
||||
else:
|
||||
error("Failed to join island - no response from hub")
|
||||
raise click.Abort()
|
||||
|
||||
except Exception as e:
|
||||
error(f"Error joining island: {str(e)}")
|
||||
raise click.Abort()
|
||||
@@ -568,57 +664,225 @@ def hub():
|
||||
@hub.command()
|
||||
@click.option('--public-address', help='Public IP address')
|
||||
@click.option('--public-port', type=int, help='Public port')
|
||||
@click.option('--redis-url', default='redis://localhost:6379', help='Redis URL for persistence')
|
||||
@click.option('--hub-discovery-url', default='hub.aitbc.bubuit.net', help='DNS hub discovery URL')
|
||||
@click.pass_context
|
||||
def register(ctx, public_address, public_port):
|
||||
def register(ctx, public_address, public_port, redis_url, hub_discovery_url):
|
||||
"""Register this node as a hub"""
|
||||
try:
|
||||
hub_info = {
|
||||
"Node ID": "local-node",
|
||||
"Status": "Registered",
|
||||
"Public Address": public_address or "auto-discovered",
|
||||
"Public Port": public_port or "auto-discovered"
|
||||
}
|
||||
|
||||
output(hub_info, ctx.obj.get('output_format', 'table'), title="Hub Registration")
|
||||
success("Successfully registered as hub")
|
||||
|
||||
# Note: In a real implementation, this would update the hub manager
|
||||
|
||||
# Get environment variables
|
||||
island_id = os.getenv('ISLAND_ID', 'default-island-id')
|
||||
island_name = os.getenv('ISLAND_NAME', 'default')
|
||||
|
||||
# Get system hostname
|
||||
hostname = socket.gethostname()
|
||||
|
||||
# Get public key from keystore
|
||||
keystore_path = '/var/lib/aitbc/keystore/validator_keys.json'
|
||||
public_key_pem = None
|
||||
|
||||
if os.path.exists(keystore_path):
|
||||
with open(keystore_path, 'r') as f:
|
||||
keys = json.load(f)
|
||||
# Get first key's public key
|
||||
for key_id, key_data in keys.items():
|
||||
public_key_pem = key_data.get('public_key_pem')
|
||||
break
|
||||
else:
|
||||
error(f"Keystore not found at {keystore_path}")
|
||||
raise click.Abort()
|
||||
|
||||
if not public_key_pem:
|
||||
error("No public key found in keystore")
|
||||
raise click.Abort()
|
||||
|
||||
# Generate node_id using hostname-based method
|
||||
local_address = socket.gethostbyname(hostname)
|
||||
local_port = 7070 # Default hub port
|
||||
content = f"{hostname}:{local_address}:{local_port}:{public_key_pem}"
|
||||
node_id = hashlib.sha256(content.encode()).hexdigest()
|
||||
|
||||
# Create HubManager instance
|
||||
sys.path.insert(0, '/opt/aitbc/apps/blockchain-node/src')
|
||||
from aitbc_chain.network.hub_manager import HubManager
|
||||
from aitbc_chain.network.hub_discovery import HubDiscovery
|
||||
|
||||
hub_manager = HubManager(
|
||||
node_id,
|
||||
local_address,
|
||||
local_port,
|
||||
island_id,
|
||||
island_name,
|
||||
redis_url
|
||||
)
|
||||
|
||||
# Register as hub (async)
|
||||
async def register_hub():
|
||||
success = await hub_manager.register_as_hub(public_address, public_port)
|
||||
if success:
|
||||
# Register with DNS discovery service
|
||||
hub_discovery = HubDiscovery(hub_discovery_url, local_port)
|
||||
hub_info_dict = {
|
||||
"node_id": node_id,
|
||||
"address": local_address,
|
||||
"port": local_port,
|
||||
"island_id": island_id,
|
||||
"island_name": island_name,
|
||||
"public_address": public_address,
|
||||
"public_port": public_port,
|
||||
"public_key_pem": public_key_pem
|
||||
}
|
||||
dns_success = await hub_discovery.register_hub(hub_info_dict)
|
||||
return success and dns_success
|
||||
return False
|
||||
|
||||
result = asyncio.run(register_hub())
|
||||
|
||||
if result:
|
||||
hub_info = {
|
||||
"Node ID": node_id,
|
||||
"Hostname": hostname,
|
||||
"Address": local_address,
|
||||
"Port": local_port,
|
||||
"Island ID": island_id,
|
||||
"Island Name": island_name,
|
||||
"Public Address": public_address or "auto-discovered",
|
||||
"Public Port": public_port or "auto-discovered",
|
||||
"Status": "Registered"
|
||||
}
|
||||
|
||||
output(hub_info, ctx.obj.get('output_format', 'table'), title="Hub Registration")
|
||||
success("Successfully registered as hub")
|
||||
else:
|
||||
error("Failed to register as hub")
|
||||
raise click.Abort()
|
||||
|
||||
except Exception as e:
|
||||
error(f"Error registering as hub: {str(e)}")
|
||||
raise click.Abort()
|
||||
|
||||
@hub.command()
|
||||
@click.option('--redis-url', default='redis://localhost:6379', help='Redis URL for persistence')
|
||||
@click.option('--hub-discovery-url', default='hub.aitbc.bubuit.net', help='DNS hub discovery URL')
|
||||
@click.pass_context
|
||||
def unregister(ctx):
|
||||
def unregister(ctx, redis_url, hub_discovery_url):
|
||||
"""Unregister this node as a hub"""
|
||||
try:
|
||||
success("Successfully unregistered as hub")
|
||||
|
||||
# Note: In a real implementation, this would update the hub manager
|
||||
|
||||
# Get environment variables
|
||||
island_id = os.getenv('ISLAND_ID', 'default-island-id')
|
||||
island_name = os.getenv('ISLAND_NAME', 'default')
|
||||
|
||||
# Get system hostname
|
||||
hostname = socket.gethostname()
|
||||
|
||||
# Get public key from keystore
|
||||
keystore_path = '/var/lib/aitbc/keystore/validator_keys.json'
|
||||
public_key_pem = None
|
||||
|
||||
if os.path.exists(keystore_path):
|
||||
with open(keystore_path, 'r') as f:
|
||||
keys = json.load(f)
|
||||
# Get first key's public key
|
||||
for key_id, key_data in keys.items():
|
||||
public_key_pem = key_data.get('public_key_pem')
|
||||
break
|
||||
else:
|
||||
error(f"Keystore not found at {keystore_path}")
|
||||
raise click.Abort()
|
||||
|
||||
if not public_key_pem:
|
||||
error("No public key found in keystore")
|
||||
raise click.Abort()
|
||||
|
||||
# Generate node_id using hostname-based method
|
||||
local_address = socket.gethostbyname(hostname)
|
||||
local_port = 7070 # Default hub port
|
||||
content = f"{hostname}:{local_address}:{local_port}:{public_key_pem}"
|
||||
node_id = hashlib.sha256(content.encode()).hexdigest()
|
||||
|
||||
# Create HubManager instance
|
||||
sys.path.insert(0, '/opt/aitbc/apps/blockchain-node/src')
|
||||
from aitbc_chain.network.hub_manager import HubManager
|
||||
from aitbc_chain.network.hub_discovery import HubDiscovery
|
||||
|
||||
hub_manager = HubManager(
|
||||
node_id,
|
||||
local_address,
|
||||
local_port,
|
||||
island_id,
|
||||
island_name,
|
||||
redis_url
|
||||
)
|
||||
|
||||
# Unregister as hub (async)
|
||||
async def unregister_hub():
|
||||
success = await hub_manager.unregister_as_hub()
|
||||
if success:
|
||||
# Unregister from DNS discovery service
|
||||
hub_discovery = HubDiscovery(hub_discovery_url, local_port)
|
||||
dns_success = await hub_discovery.unregister_hub(node_id)
|
||||
return success and dns_success
|
||||
return False
|
||||
|
||||
result = asyncio.run(unregister_hub())
|
||||
|
||||
if result:
|
||||
hub_info = {
|
||||
"Node ID": node_id,
|
||||
"Status": "Unregistered"
|
||||
}
|
||||
|
||||
output(hub_info, ctx.obj.get('output_format', 'table'), title="Hub Unregistration")
|
||||
success("Successfully unregistered as hub")
|
||||
else:
|
||||
error("Failed to unregister as hub")
|
||||
raise click.Abort()
|
||||
|
||||
except Exception as e:
|
||||
error(f"Error unregistering as hub: {str(e)}")
|
||||
raise click.Abort()
|
||||
|
||||
@hub.command()
|
||||
@click.option('--redis-url', default='redis://localhost:6379', help='Redis URL for persistence')
|
||||
@click.pass_context
|
||||
def list(ctx):
|
||||
"""List known hubs"""
|
||||
def list(ctx, redis_url):
|
||||
"""List registered hubs from Redis"""
|
||||
try:
|
||||
# Note: In a real implementation, this would query the hub manager
|
||||
hubs = [
|
||||
{
|
||||
"Node ID": "hub-node-1",
|
||||
"Address": "10.1.1.1",
|
||||
"Port": 7070,
|
||||
"Island ID": "550e8400-e29b-41d4-a716-446655440000",
|
||||
"Peer Count": "5"
|
||||
}
|
||||
]
|
||||
|
||||
output(hubs, ctx.obj.get('output_format', 'table'), title="Known Hubs")
|
||||
|
||||
import redis.asyncio as redis
|
||||
|
||||
async def list_hubs():
|
||||
hubs = []
|
||||
try:
|
||||
r = redis.from_url(redis_url)
|
||||
# Get all hub keys
|
||||
keys = await r.keys("hub:*")
|
||||
for key in keys:
|
||||
value = await r.get(key)
|
||||
if value:
|
||||
hub_data = json.loads(value)
|
||||
hubs.append({
|
||||
"Node ID": hub_data.get("node_id"),
|
||||
"Address": hub_data.get("address"),
|
||||
"Port": hub_data.get("port"),
|
||||
"Island ID": hub_data.get("island_id"),
|
||||
"Island Name": hub_data.get("island_name"),
|
||||
"Public Address": hub_data.get("public_address", "N/A"),
|
||||
"Public Port": hub_data.get("public_port", "N/A"),
|
||||
"Peer Count": hub_data.get("peer_count", 0)
|
||||
})
|
||||
await r.close()
|
||||
except Exception as e:
|
||||
error(f"Failed to query Redis: {e}")
|
||||
return []
|
||||
return hubs
|
||||
|
||||
hubs = asyncio.run(list_hubs())
|
||||
|
||||
if hubs:
|
||||
output(hubs, ctx.obj.get('output_format', 'table'), title="Registered Hubs")
|
||||
else:
|
||||
info("No registered hubs found")
|
||||
|
||||
except Exception as e:
|
||||
error(f"Error listing hubs: {str(e)}")
|
||||
raise click.Abort()
|
||||
|
||||
181
cli/aitbc_cli/utils/island_credentials.py
Normal file
181
cli/aitbc_cli/utils/island_credentials.py
Normal file
@@ -0,0 +1,181 @@
|
||||
"""
|
||||
Island Credential Loading Utility
|
||||
Provides functions to load and validate island credentials from the local filesystem
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
from typing import Dict, Optional
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
CREDENTIALS_PATH = '/var/lib/aitbc/island_credentials.json'
|
||||
|
||||
|
||||
def load_island_credentials() -> Dict:
|
||||
"""
|
||||
Load island credentials from the local filesystem
|
||||
|
||||
Returns:
|
||||
dict: Island credentials containing island_id, island_name, chain_id, credentials, etc.
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If credentials file does not exist
|
||||
json.JSONDecodeError: If credentials file is invalid JSON
|
||||
ValueError: If credentials are invalid or missing required fields
|
||||
"""
|
||||
credentials_path = Path(CREDENTIALS_PATH)
|
||||
|
||||
if not credentials_path.exists():
|
||||
raise FileNotFoundError(
|
||||
f"Island credentials not found at {CREDENTIALS_PATH}. "
|
||||
f"Run 'aitbc node island join' to join an island first."
|
||||
)
|
||||
|
||||
with open(credentials_path, 'r') as f:
|
||||
credentials = json.load(f)
|
||||
|
||||
# Validate required fields
|
||||
required_fields = ['island_id', 'island_name', 'island_chain_id', 'credentials']
|
||||
for field in required_fields:
|
||||
if field not in credentials:
|
||||
raise ValueError(f"Invalid credentials: missing required field '{field}'")
|
||||
|
||||
return credentials
|
||||
|
||||
|
||||
def get_rpc_endpoint() -> str:
|
||||
"""
|
||||
Get the RPC endpoint from island credentials
|
||||
|
||||
Returns:
|
||||
str: RPC endpoint URL
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If credentials file does not exist
|
||||
ValueError: If RPC endpoint is missing from credentials
|
||||
"""
|
||||
credentials = load_island_credentials()
|
||||
rpc_endpoint = credentials.get('credentials', {}).get('rpc_endpoint')
|
||||
|
||||
if not rpc_endpoint:
|
||||
raise ValueError("RPC endpoint not found in island credentials")
|
||||
|
||||
return rpc_endpoint
|
||||
|
||||
|
||||
def get_chain_id() -> str:
|
||||
"""
|
||||
Get the chain ID from island credentials
|
||||
|
||||
Returns:
|
||||
str: Chain ID
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If credentials file does not exist
|
||||
ValueError: If chain ID is missing from credentials
|
||||
"""
|
||||
credentials = load_island_credentials()
|
||||
chain_id = credentials.get('island_chain_id')
|
||||
|
||||
if not chain_id:
|
||||
raise ValueError("Chain ID not found in island credentials")
|
||||
|
||||
return chain_id
|
||||
|
||||
|
||||
def get_island_id() -> str:
|
||||
"""
|
||||
Get the island ID from island credentials
|
||||
|
||||
Returns:
|
||||
str: Island ID
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If credentials file does not exist
|
||||
ValueError: If island ID is missing from credentials
|
||||
"""
|
||||
credentials = load_island_credentials()
|
||||
island_id = credentials.get('island_id')
|
||||
|
||||
if not island_id:
|
||||
raise ValueError("Island ID not found in island credentials")
|
||||
|
||||
return island_id
|
||||
|
||||
|
||||
def get_island_name() -> str:
|
||||
"""
|
||||
Get the island name from island credentials
|
||||
|
||||
Returns:
|
||||
str: Island name
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If credentials file does not exist
|
||||
ValueError: If island name is missing from credentials
|
||||
"""
|
||||
credentials = load_island_credentials()
|
||||
island_name = credentials.get('island_name')
|
||||
|
||||
if not island_name:
|
||||
raise ValueError("Island name not found in island credentials")
|
||||
|
||||
return island_name
|
||||
|
||||
|
||||
def get_genesis_block_hash() -> Optional[str]:
|
||||
"""
|
||||
Get the genesis block hash from island credentials
|
||||
|
||||
Returns:
|
||||
str: Genesis block hash, or None if not available
|
||||
"""
|
||||
try:
|
||||
credentials = load_island_credentials()
|
||||
return credentials.get('credentials', {}).get('genesis_block_hash')
|
||||
except (FileNotFoundError, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
def get_genesis_address() -> Optional[str]:
|
||||
"""
|
||||
Get the genesis address from island credentials
|
||||
|
||||
Returns:
|
||||
str: Genesis address, or None if not available
|
||||
"""
|
||||
try:
|
||||
credentials = load_island_credentials()
|
||||
return credentials.get('credentials', {}).get('genesis_address')
|
||||
except (FileNotFoundError, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
def validate_credentials() -> bool:
|
||||
"""
|
||||
Validate that island credentials exist and are valid
|
||||
|
||||
Returns:
|
||||
bool: True if credentials are valid, False otherwise
|
||||
"""
|
||||
try:
|
||||
credentials = load_island_credentials()
|
||||
# Check for essential fields
|
||||
return all(key in credentials for key in ['island_id', 'island_name', 'island_chain_id', 'credentials'])
|
||||
except (FileNotFoundError, json.JSONDecodeError, ValueError):
|
||||
return False
|
||||
|
||||
|
||||
def get_p2p_port() -> Optional[int]:
|
||||
"""
|
||||
Get the P2P port from island credentials
|
||||
|
||||
Returns:
|
||||
int: P2P port, or None if not available
|
||||
"""
|
||||
try:
|
||||
credentials = load_island_credentials()
|
||||
return credentials.get('credentials', {}).get('p2p_port')
|
||||
except (FileNotFoundError, ValueError):
|
||||
return None
|
||||
@@ -7,6 +7,10 @@ import click
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
# Import island-specific commands
|
||||
from aitbc_cli.commands.gpu_marketplace import gpu
|
||||
from aitbc_cli.commands.exchange_island import exchange_island
|
||||
|
||||
# Force version to 0.2.2
|
||||
__version__ = "0.2.2"
|
||||
|
||||
@@ -141,6 +145,8 @@ def cli(ctx, url, api_key, output, verbose, debug):
|
||||
# Add commands to CLI
|
||||
cli.add_command(system)
|
||||
cli.add_command(version)
|
||||
cli.add_command(gpu)
|
||||
cli.add_command(exchange_island)
|
||||
|
||||
if __name__ == '__main__':
|
||||
cli()
|
||||
|
||||
252
cli/tests/test_exchange_island.py
Normal file
252
cli/tests/test_exchange_island.py
Normal file
@@ -0,0 +1,252 @@
|
||||
"""
|
||||
Unit tests for Exchange Island CLI commands
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import json
|
||||
import os
|
||||
from pathlib import Path
|
||||
from click.testing import CliRunner
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_credentials_file(tmp_path):
|
||||
"""Create a temporary credentials file for testing"""
|
||||
credentials = {
|
||||
"island_id": "test-island-id-12345",
|
||||
"island_name": "test-island",
|
||||
"island_chain_id": "ait-test",
|
||||
"credentials": {
|
||||
"genesis_block_hash": "0x1234567890abcdef",
|
||||
"genesis_address": "0xabcdef1234567890",
|
||||
"rpc_endpoint": "http://localhost:8006",
|
||||
"p2p_port": 8001
|
||||
},
|
||||
"members": [],
|
||||
"joined_at": "2024-01-01T00:00:00"
|
||||
}
|
||||
|
||||
# Monkey patch the credentials path
|
||||
import aitbc_cli.utils.island_credentials as ic_module
|
||||
original_path = ic_module.CREDENTIALS_PATH
|
||||
ic_module.CREDENTIALS_PATH = str(tmp_path / "island_credentials.json")
|
||||
|
||||
# Write credentials to temp file
|
||||
with open(ic_module.CREDENTIALS_PATH, 'w') as f:
|
||||
json.dump(credentials, f)
|
||||
|
||||
yield credentials
|
||||
|
||||
# Cleanup
|
||||
if os.path.exists(ic_module.CREDENTIALS_PATH):
|
||||
os.remove(ic_module.CREDENTIALS_PATH)
|
||||
ic_module.CREDENTIALS_PATH = original_path
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_keystore(tmp_path):
|
||||
"""Create a temporary keystore for testing"""
|
||||
keystore = {
|
||||
"test_key_id": {
|
||||
"public_key_pem": "-----BEGIN PUBLIC KEY-----\ntest_public_key_data\n-----END PUBLIC KEY-----"
|
||||
}
|
||||
}
|
||||
|
||||
keystore_path = tmp_path / "validator_keys.json"
|
||||
with open(keystore_path, 'w') as f:
|
||||
json.dump(keystore, f)
|
||||
|
||||
# Monkey patch keystore path
|
||||
import aitbc_cli.commands.exchange_island as ei_module
|
||||
original_path = ei_module.__dict__.get('keystore_path')
|
||||
|
||||
yield str(keystore_path)
|
||||
|
||||
# Restore
|
||||
if original_path:
|
||||
ei_module.keystore_path = original_path
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def runner():
|
||||
"""Create a Click CLI runner"""
|
||||
return CliRunner()
|
||||
|
||||
|
||||
def test_exchange_buy_command(mock_credentials_file, mock_keystore, runner):
|
||||
"""Test exchange buy command"""
|
||||
from aitbc_cli.commands.exchange_island import exchange_island
|
||||
|
||||
with patch('aitbc_cli.commands.exchange_island.httpx.Client') as mock_client:
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {"transaction_id": "test_tx_id"}
|
||||
mock_client.return_value.__enter__.return_value.post.return_value = mock_response
|
||||
|
||||
result = runner.invoke(exchange_island, ['buy', '100', 'BTC', '--max-price', '0.00001'])
|
||||
|
||||
assert result.exit_code == 0
|
||||
assert "Buy order created successfully" in result.output
|
||||
|
||||
|
||||
def test_exchange_buy_command_invalid_amount(mock_credentials_file, runner):
|
||||
"""Test exchange buy command with invalid amount"""
|
||||
from aitbc_cli.commands.exchange_island import exchange_island
|
||||
|
||||
result = runner.invoke(exchange_island, ['buy', '-10', 'BTC'])
|
||||
|
||||
assert result.exit_code != 0
|
||||
assert "must be greater than 0" in result.output
|
||||
|
||||
|
||||
def test_exchange_sell_command(mock_credentials_file, mock_keystore, runner):
|
||||
"""Test exchange sell command"""
|
||||
from aitbc_cli.commands.exchange_island import exchange_island
|
||||
|
||||
with patch('aitbc_cli.commands.exchange_island.httpx.Client') as mock_client:
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {"transaction_id": "test_tx_id"}
|
||||
mock_client.return_value.__enter__.return_value.post.return_value = mock_response
|
||||
|
||||
result = runner.invoke(exchange_island, ['sell', '100', 'ETH', '--min-price', '0.0005'])
|
||||
|
||||
assert result.exit_code == 0
|
||||
assert "Sell order created successfully" in result.output
|
||||
|
||||
|
||||
def test_exchange_sell_command_invalid_amount(mock_credentials_file, runner):
|
||||
"""Test exchange sell command with invalid amount"""
|
||||
from aitbc_cli.commands.exchange_island import exchange_island
|
||||
|
||||
result = runner.invoke(exchange_island, ['sell', '-10', 'ETH'])
|
||||
|
||||
assert result.exit_code != 0
|
||||
assert "must be greater than 0" in result.output
|
||||
|
||||
|
||||
def test_exchange_orderbook_command(mock_credentials_file, runner):
|
||||
"""Test exchange orderbook command"""
|
||||
from aitbc_cli.commands.exchange_island import exchange_island
|
||||
|
||||
with patch('aitbc_cli.commands.exchange_island.httpx.Client') as mock_client:
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = [
|
||||
{
|
||||
"action": "buy",
|
||||
"order_id": "exchange_buy_test",
|
||||
"user_id": "test_user",
|
||||
"pair": "AIT/BTC",
|
||||
"side": "buy",
|
||||
"amount": 100.0,
|
||||
"max_price": 0.00001,
|
||||
"status": "open",
|
||||
"created_at": "2024-01-01T00:00:00"
|
||||
},
|
||||
{
|
||||
"action": "sell",
|
||||
"order_id": "exchange_sell_test",
|
||||
"user_id": "test_user2",
|
||||
"pair": "AIT/BTC",
|
||||
"side": "sell",
|
||||
"amount": 100.0,
|
||||
"min_price": 0.000009,
|
||||
"status": "open",
|
||||
"created_at": "2024-01-01T00:00:00"
|
||||
}
|
||||
]
|
||||
mock_client.return_value.__enter__.return_value.get.return_value = mock_response
|
||||
|
||||
result = runner.invoke(exchange_island, ['orderbook', 'AIT/BTC'])
|
||||
|
||||
assert result.exit_code == 0
|
||||
|
||||
|
||||
def test_exchange_rates_command(mock_credentials_file, runner):
|
||||
"""Test exchange rates command"""
|
||||
from aitbc_cli.commands.exchange_island import exchange_island
|
||||
|
||||
with patch('aitbc_cli.commands.exchange_island.httpx.Client') as mock_client:
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = []
|
||||
mock_client.return_value.__enter__.return_value.get.return_value = mock_response
|
||||
|
||||
result = runner.invoke(exchange_island, ['rates'])
|
||||
|
||||
assert result.exit_code == 0
|
||||
|
||||
|
||||
def test_exchange_orders_command(mock_credentials_file, runner):
|
||||
"""Test exchange orders command"""
|
||||
from aitbc_cli.commands.exchange_island import exchange_island
|
||||
|
||||
with patch('aitbc_cli.commands.exchange_island.httpx.Client') as mock_client:
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = [
|
||||
{
|
||||
"action": "buy",
|
||||
"order_id": "exchange_buy_test",
|
||||
"user_id": "test_user",
|
||||
"pair": "AIT/BTC",
|
||||
"side": "buy",
|
||||
"amount": 100.0,
|
||||
"max_price": 0.00001,
|
||||
"status": "open",
|
||||
"created_at": "2024-01-01T00:00:00"
|
||||
}
|
||||
]
|
||||
mock_client.return_value.__enter__.return_value.get.return_value = mock_response
|
||||
|
||||
result = runner.invoke(exchange_island, ['orders'])
|
||||
|
||||
assert result.exit_code == 0
|
||||
|
||||
|
||||
def test_exchange_cancel_command(mock_credentials_file, mock_keystore, runner):
|
||||
"""Test exchange cancel command"""
|
||||
from aitbc_cli.commands.exchange_island import exchange_island
|
||||
|
||||
with patch('aitbc_cli.commands.exchange_island.httpx.Client') as mock_client:
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.return_value.__enter__.return_value.post.return_value = mock_response
|
||||
|
||||
result = runner.invoke(exchange_island, ['cancel', 'exchange_buy_test123'])
|
||||
|
||||
assert result.exit_code == 0
|
||||
assert "cancelled successfully" in result.output
|
||||
|
||||
|
||||
def test_exchange_orderbook_invalid_pair(mock_credentials_file, runner):
|
||||
"""Test exchange orderbook command with invalid pair"""
|
||||
from aitbc_cli.commands.exchange_island import exchange_island
|
||||
|
||||
result = runner.invoke(exchange_island, ['orderbook', 'INVALID/PAIR'])
|
||||
|
||||
assert result.exit_code != 0
|
||||
|
||||
|
||||
def test_exchange_buy_invalid_currency(mock_credentials_file, runner):
|
||||
"""Test exchange buy command with invalid currency"""
|
||||
from aitbc_cli.commands.exchange_island import exchange_island
|
||||
|
||||
result = runner.invoke(exchange_island, ['buy', '100', 'INVALID'])
|
||||
|
||||
assert result.exit_code != 0
|
||||
|
||||
|
||||
def test_exchange_sell_invalid_currency(mock_credentials_file, runner):
|
||||
"""Test exchange sell command with invalid currency"""
|
||||
from aitbc_cli.commands.exchange_island import exchange_island
|
||||
|
||||
result = runner.invoke(exchange_island, ['sell', '100', 'INVALID'])
|
||||
|
||||
assert result.exit_code != 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pytest.main([__file__])
|
||||
246
cli/tests/test_gpu_marketplace.py
Normal file
246
cli/tests/test_gpu_marketplace.py
Normal file
@@ -0,0 +1,246 @@
|
||||
"""
|
||||
Unit tests for GPU marketplace CLI commands
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import json
|
||||
import os
|
||||
from pathlib import Path
|
||||
from click.testing import CliRunner
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_credentials_file(tmp_path):
|
||||
"""Create a temporary credentials file for testing"""
|
||||
credentials = {
|
||||
"island_id": "test-island-id-12345",
|
||||
"island_name": "test-island",
|
||||
"island_chain_id": "ait-test",
|
||||
"credentials": {
|
||||
"genesis_block_hash": "0x1234567890abcdef",
|
||||
"genesis_address": "0xabcdef1234567890",
|
||||
"rpc_endpoint": "http://localhost:8006",
|
||||
"p2p_port": 8001
|
||||
},
|
||||
"members": [],
|
||||
"joined_at": "2024-01-01T00:00:00"
|
||||
}
|
||||
|
||||
# Monkey patch the credentials path
|
||||
import aitbc_cli.utils.island_credentials as ic_module
|
||||
original_path = ic_module.CREDENTIALS_PATH
|
||||
ic_module.CREDENTIALS_PATH = str(tmp_path / "island_credentials.json")
|
||||
|
||||
# Write credentials to temp file
|
||||
with open(ic_module.CREDENTIALS_PATH, 'w') as f:
|
||||
json.dump(credentials, f)
|
||||
|
||||
yield credentials
|
||||
|
||||
# Cleanup
|
||||
if os.path.exists(ic_module.CREDENTIALS_PATH):
|
||||
os.remove(ic_module.CREDENTIALS_PATH)
|
||||
ic_module.CREDENTIALS_PATH = original_path
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_keystore(tmp_path):
|
||||
"""Create a temporary keystore for testing"""
|
||||
keystore = {
|
||||
"test_key_id": {
|
||||
"public_key_pem": "-----BEGIN PUBLIC KEY-----\ntest_public_key_data\n-----END PUBLIC KEY-----"
|
||||
}
|
||||
}
|
||||
|
||||
keystore_path = tmp_path / "validator_keys.json"
|
||||
with open(keystore_path, 'w') as f:
|
||||
json.dump(keystore, f)
|
||||
|
||||
# Monkey patch keystore path
|
||||
import aitbc_cli.commands.gpu_marketplace as gm_module
|
||||
original_path = gm_module.__dict__.get('keystore_path')
|
||||
|
||||
yield str(keystore_path)
|
||||
|
||||
# Restore
|
||||
if original_path:
|
||||
gm_module.keystore_path = original_path
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def runner():
|
||||
"""Create a Click CLI runner"""
|
||||
return CliRunner()
|
||||
|
||||
|
||||
def test_gpu_offer_command(mock_credentials_file, mock_keystore, runner):
|
||||
"""Test GPU offer command"""
|
||||
from aitbc_cli.commands.gpu_marketplace import gpu
|
||||
|
||||
with patch('aitbc_cli.commands.gpu_marketplace.httpx.Client') as mock_client:
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {"transaction_id": "test_tx_id"}
|
||||
mock_client.return_value.__enter__.return_value.post.return_value = mock_response
|
||||
|
||||
result = runner.invoke(gpu, ['offer', '2', '0.5', '24'])
|
||||
|
||||
assert result.exit_code == 0
|
||||
assert "GPU offer created successfully" in result.output
|
||||
|
||||
|
||||
def test_gpu_bid_command(mock_credentials_file, mock_keystore, runner):
|
||||
"""Test GPU bid command"""
|
||||
from aitbc_cli.commands.gpu_marketplace import gpu
|
||||
|
||||
with patch('aitbc_cli.commands.gpu_marketplace.httpx.Client') as mock_client:
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {"transaction_id": "test_tx_id"}
|
||||
mock_client.return_value.__enter__.return_value.post.return_value = mock_response
|
||||
|
||||
result = runner.invoke(gpu, ['bid', '2', '1.0', '24'])
|
||||
|
||||
assert result.exit_code == 0
|
||||
assert "GPU bid created successfully" in result.output
|
||||
|
||||
|
||||
def test_gpu_list_command(mock_credentials_file, runner):
|
||||
"""Test GPU list command"""
|
||||
from aitbc_cli.commands.gpu_marketplace import gpu
|
||||
|
||||
with patch('aitbc_cli.commands.gpu_marketplace.httpx.Client') as mock_client:
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = [
|
||||
{
|
||||
"action": "offer",
|
||||
"offer_id": "gpu_offer_test",
|
||||
"gpu_count": 2,
|
||||
"price_per_gpu": 0.5,
|
||||
"duration_hours": 24,
|
||||
"total_price": 24.0,
|
||||
"status": "active",
|
||||
"provider_node_id": "test_provider",
|
||||
"created_at": "2024-01-01T00:00:00"
|
||||
}
|
||||
]
|
||||
mock_client.return_value.__enter__.return_value.get.return_value = mock_response
|
||||
|
||||
result = runner.invoke(gpu, ['list'])
|
||||
|
||||
assert result.exit_code == 0
|
||||
|
||||
|
||||
def test_gpu_cancel_command(mock_credentials_file, mock_keystore, runner):
|
||||
"""Test GPU cancel command"""
|
||||
from aitbc_cli.commands.gpu_marketplace import gpu
|
||||
|
||||
with patch('aitbc_cli.commands.gpu_marketplace.httpx.Client') as mock_client:
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.return_value.__enter__.return_value.post.return_value = mock_response
|
||||
|
||||
result = runner.invoke(gpu, ['cancel', 'gpu_offer_test123'])
|
||||
|
||||
assert result.exit_code == 0
|
||||
assert "cancelled successfully" in result.output
|
||||
|
||||
|
||||
def test_gpu_accept_command(mock_credentials_file, mock_keystore, runner):
|
||||
"""Test GPU accept command"""
|
||||
from aitbc_cli.commands.gpu_marketplace import gpu
|
||||
|
||||
with patch('aitbc_cli.commands.gpu_marketplace.httpx.Client') as mock_client:
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_client.return_value.__enter__.return_value.post.return_value = mock_response
|
||||
|
||||
result = runner.invoke(gpu, ['accept', 'gpu_bid_test123'])
|
||||
|
||||
assert result.exit_code == 0
|
||||
assert "accepted successfully" in result.output
|
||||
|
||||
|
||||
def test_gpu_status_command(mock_credentials_file, runner):
|
||||
"""Test GPU status command"""
|
||||
from aitbc_cli.commands.gpu_marketplace import gpu
|
||||
|
||||
with patch('aitbc_cli.commands.gpu_marketplace.httpx.Client') as mock_client:
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = [
|
||||
{
|
||||
"action": "offer",
|
||||
"offer_id": "gpu_offer_test",
|
||||
"gpu_count": 2,
|
||||
"price_per_gpu": 0.5,
|
||||
"duration_hours": 24,
|
||||
"total_price": 24.0,
|
||||
"status": "active",
|
||||
"provider_node_id": "test_provider",
|
||||
"created_at": "2024-01-01T00:00:00"
|
||||
}
|
||||
]
|
||||
mock_client.return_value.__enter__.return_value.get.return_value = mock_response
|
||||
|
||||
result = runner.invoke(gpu, ['status', 'gpu_offer_test'])
|
||||
|
||||
assert result.exit_code == 0
|
||||
|
||||
|
||||
def test_gpu_match_command(mock_credentials_file, runner):
|
||||
"""Test GPU match command"""
|
||||
from aitbc_cli.commands.gpu_marketplace import gpu
|
||||
|
||||
with patch('aitbc_cli.commands.gpu_marketplace.httpx.Client') as mock_client:
|
||||
# Mock the GET request for transactions
|
||||
mock_get_response = MagicMock()
|
||||
mock_get_response.status_code = 200
|
||||
mock_get_response.json.return_value = [
|
||||
{
|
||||
"action": "offer",
|
||||
"offer_id": "gpu_offer_test",
|
||||
"gpu_count": 2,
|
||||
"price_per_gpu": 0.5,
|
||||
"duration_hours": 24,
|
||||
"total_price": 24.0,
|
||||
"status": "active",
|
||||
"provider_node_id": "test_provider"
|
||||
},
|
||||
{
|
||||
"action": "bid",
|
||||
"bid_id": "gpu_bid_test",
|
||||
"gpu_count": 2,
|
||||
"max_price_per_gpu": 1.0,
|
||||
"duration_hours": 24,
|
||||
"max_total_price": 48.0,
|
||||
"status": "pending",
|
||||
"bidder_node_id": "test_bidder"
|
||||
}
|
||||
]
|
||||
|
||||
# Mock the POST request for match transaction
|
||||
mock_post_response = MagicMock()
|
||||
mock_post_response.status_code = 200
|
||||
|
||||
mock_client.return_value.__enter__.return_value.get.return_value = mock_get_response
|
||||
mock_client.return_value.__enter__.return_value.post.return_value = mock_post_response
|
||||
|
||||
result = runner.invoke(gpu, ['match'])
|
||||
|
||||
assert result.exit_code == 0
|
||||
|
||||
|
||||
def test_gpu_providers_command(mock_credentials_file, runner):
|
||||
"""Test GPU providers command"""
|
||||
from aitbc_cli.commands.gpu_marketplace import gpu
|
||||
|
||||
result = runner.invoke(gpu, ['providers'])
|
||||
|
||||
assert result.exit_code == 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pytest.main([__file__])
|
||||
201
cli/tests/test_island_credentials.py
Normal file
201
cli/tests/test_island_credentials.py
Normal file
@@ -0,0 +1,201 @@
|
||||
"""
|
||||
Unit tests for island credential loading utility
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import json
|
||||
import os
|
||||
from pathlib import Path
|
||||
from aitbc_cli.utils.island_credentials import (
|
||||
load_island_credentials,
|
||||
get_rpc_endpoint,
|
||||
get_chain_id,
|
||||
get_island_id,
|
||||
get_island_name,
|
||||
get_genesis_block_hash,
|
||||
get_genesis_address,
|
||||
validate_credentials,
|
||||
get_p2p_port
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_credentials_file(tmp_path):
|
||||
"""Create a temporary credentials file for testing"""
|
||||
credentials = {
|
||||
"island_id": "test-island-id-12345",
|
||||
"island_name": "test-island",
|
||||
"island_chain_id": "ait-test",
|
||||
"credentials": {
|
||||
"genesis_block_hash": "0x1234567890abcdef",
|
||||
"genesis_address": "0xabcdef1234567890",
|
||||
"rpc_endpoint": "http://localhost:8006",
|
||||
"p2p_port": 8001
|
||||
},
|
||||
"joined_at": "2024-01-01T00:00:00"
|
||||
}
|
||||
|
||||
# Monkey patch the credentials path
|
||||
import aitbc_cli.utils.island_credentials as ic_module
|
||||
original_path = ic_module.CREDENTIALS_PATH
|
||||
ic_module.CREDENTIALS_PATH = str(tmp_path / "island_credentials.json")
|
||||
|
||||
# Write credentials to temp file
|
||||
with open(ic_module.CREDENTIALS_PATH, 'w') as f:
|
||||
json.dump(credentials, f)
|
||||
|
||||
yield credentials
|
||||
|
||||
# Cleanup
|
||||
if os.path.exists(ic_module.CREDENTIALS_PATH):
|
||||
os.remove(ic_module.CREDENTIALS_PATH)
|
||||
ic_module.CREDENTIALS_PATH = original_path
|
||||
|
||||
|
||||
def test_load_island_credentials(mock_credentials_file):
|
||||
"""Test loading island credentials"""
|
||||
credentials = load_island_credentials()
|
||||
|
||||
assert credentials is not None
|
||||
assert credentials['island_id'] == "test-island-id-12345"
|
||||
assert credentials['island_name'] == "test-island"
|
||||
assert credentials['island_chain_id'] == "ait-test"
|
||||
assert 'credentials' in credentials
|
||||
|
||||
|
||||
def test_load_island_credentials_file_not_found():
|
||||
"""Test loading credentials when file doesn't exist"""
|
||||
import aitbc_cli.utils.island_credentials as ic_module
|
||||
original_path = ic_module.CREDENTIALS_PATH
|
||||
ic_module.CREDENTIALS_PATH = "/nonexistent/path/credentials.json"
|
||||
|
||||
with pytest.raises(FileNotFoundError):
|
||||
load_island_credentials()
|
||||
|
||||
ic_module.CREDENTIALS_PATH = original_path
|
||||
|
||||
|
||||
def test_load_island_credentials_invalid_json(tmp_path):
|
||||
"""Test loading credentials with invalid JSON"""
|
||||
import aitbc_cli.utils.island_credentials as ic_module
|
||||
original_path = ic_module.CREDENTIALS_PATH
|
||||
ic_module.CREDENTIALS_PATH = str(tmp_path / "invalid.json")
|
||||
|
||||
with open(ic_module.CREDENTIALS_PATH, 'w') as f:
|
||||
f.write("invalid json")
|
||||
|
||||
with pytest.raises(json.JSONDecodeError):
|
||||
load_island_credentials()
|
||||
|
||||
ic_module.CREDENTIALS_PATH = original_path
|
||||
|
||||
|
||||
def test_load_island_credentials_missing_fields(tmp_path):
|
||||
"""Test loading credentials with missing required fields"""
|
||||
import aitbc_cli.utils.island_credentials as ic_module
|
||||
original_path = ic_module.CREDENTIALS_PATH
|
||||
ic_module.CREDENTIALS_PATH = str(tmp_path / "incomplete.json")
|
||||
|
||||
with open(ic_module.CREDENTIALS_PATH, 'w') as f:
|
||||
json.dump({"island_id": "test"}, f)
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
load_island_credentials()
|
||||
|
||||
ic_module.CREDENTIALS_PATH = original_path
|
||||
|
||||
|
||||
def test_get_rpc_endpoint(mock_credentials_file):
|
||||
"""Test getting RPC endpoint from credentials"""
|
||||
rpc_endpoint = get_rpc_endpoint()
|
||||
|
||||
assert rpc_endpoint == "http://localhost:8006"
|
||||
|
||||
|
||||
def test_get_chain_id(mock_credentials_file):
|
||||
"""Test getting chain ID from credentials"""
|
||||
chain_id = get_chain_id()
|
||||
|
||||
assert chain_id == "ait-test"
|
||||
|
||||
|
||||
def test_get_island_id(mock_credentials_file):
|
||||
"""Test getting island ID from credentials"""
|
||||
island_id = get_island_id()
|
||||
|
||||
assert island_id == "test-island-id-12345"
|
||||
|
||||
|
||||
def test_get_island_name(mock_credentials_file):
|
||||
"""Test getting island name from credentials"""
|
||||
island_name = get_island_name()
|
||||
|
||||
assert island_name == "test-island"
|
||||
|
||||
|
||||
def test_get_genesis_block_hash(mock_credentials_file):
|
||||
"""Test getting genesis block hash from credentials"""
|
||||
genesis_hash = get_genesis_block_hash()
|
||||
|
||||
assert genesis_hash == "0x1234567890abcdef"
|
||||
|
||||
|
||||
def test_get_genesis_address(mock_credentials_file):
|
||||
"""Test getting genesis address from credentials"""
|
||||
genesis_address = get_genesis_address()
|
||||
|
||||
assert genesis_address == "0xabcdef1234567890"
|
||||
|
||||
|
||||
def test_get_p2p_port(mock_credentials_file):
|
||||
"""Test getting P2P port from credentials"""
|
||||
p2p_port = get_p2p_port()
|
||||
|
||||
assert p2p_port == 8001
|
||||
|
||||
|
||||
def test_validate_credentials_valid(mock_credentials_file):
|
||||
"""Test validating valid credentials"""
|
||||
is_valid = validate_credentials()
|
||||
|
||||
assert is_valid is True
|
||||
|
||||
|
||||
def test_validate_credentials_invalid_file(tmp_path):
|
||||
"""Test validating credentials when file doesn't exist"""
|
||||
import aitbc_cli.utils.island_credentials as ic_module
|
||||
original_path = ic_module.CREDENTIALS_PATH
|
||||
ic_module.CREDENTIALS_PATH = "/nonexistent/path/credentials.json"
|
||||
|
||||
is_valid = validate_credentials()
|
||||
|
||||
assert is_valid is False
|
||||
|
||||
ic_module.CREDENTIALS_PATH = original_path
|
||||
|
||||
|
||||
def test_get_genesis_block_hash_missing(tmp_path):
|
||||
"""Test getting genesis block hash when not present"""
|
||||
import aitbc_cli.utils.island_credentials as ic_module
|
||||
original_path = ic_module.CREDENTIALS_PATH
|
||||
|
||||
credentials = {
|
||||
"island_id": "test-island-id",
|
||||
"island_name": "test-island",
|
||||
"island_chain_id": "ait-test",
|
||||
"credentials": {}
|
||||
}
|
||||
|
||||
ic_module.CREDENTIALS_PATH = str(tmp_path / "no_genesis.json")
|
||||
with open(ic_module.CREDENTIALS_PATH, 'w') as f:
|
||||
json.dump(credentials, f)
|
||||
|
||||
genesis_hash = get_genesis_block_hash()
|
||||
|
||||
assert genesis_hash is None
|
||||
|
||||
ic_module.CREDENTIALS_PATH = original_path
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pytest.main([__file__])
|
||||
@@ -7,20 +7,28 @@ Configure P2P networking for your blockchain node.
|
||||
|
||||
```bash
|
||||
# Allow P2P port
|
||||
sudo ufw allow 7070/tcp
|
||||
sudo ufw allow 8001/tcp
|
||||
|
||||
# Allow RPC port
|
||||
sudo ufw allow 8080/tcp
|
||||
sudo ufw allow 8006/tcp
|
||||
|
||||
# Allow Marketplace port
|
||||
sudo ufw allow 8007/tcp
|
||||
|
||||
# Allow Exchange port
|
||||
sudo ufw allow 8008/tcp
|
||||
|
||||
# Allow from specific IPs
|
||||
sudo ufw allow from 10.0.0.0/8 to any port 8080
|
||||
sudo ufw allow from 10.0.0.0/8 to any port 8006
|
||||
```
|
||||
|
||||
### Port Forwarding
|
||||
|
||||
If behind a NAT, configure port forwarding:
|
||||
- External port 7070 → Internal IP:7070
|
||||
- External port 8080 → Internal IP:8080
|
||||
- External port 8001 → Internal IP:8001
|
||||
- External port 8006 → Internal IP:8006
|
||||
- External port 8007 → Internal IP:8007 (Marketplace)
|
||||
- External port 8008 → Internal IP:8008 (Exchange)
|
||||
|
||||
## Federated Mesh Architecture
|
||||
|
||||
|
||||
@@ -1,247 +0,0 @@
|
||||
# AITBC Nginx Reverse Proxy Configuration
|
||||
# Domain: aitbc.keisanki.net
|
||||
# This configuration replaces the need for firehol/iptables port forwarding
|
||||
|
||||
# HTTP to HTTPS redirect
|
||||
server {
|
||||
listen 80;
|
||||
server_name aitbc.keisanki.net;
|
||||
|
||||
# Redirect all HTTP traffic to HTTPS
|
||||
return 301 https://$server_name$request_uri;
|
||||
}
|
||||
|
||||
# Main HTTPS server block
|
||||
server {
|
||||
listen 443 ssl http2;
|
||||
server_name aitbc.keisanki.net;
|
||||
|
||||
# SSL Configuration (Let's Encrypt certificates)
|
||||
ssl_certificate /etc/letsencrypt/live/aitbc.keisanki.net/fullchain.pem;
|
||||
ssl_certificate_key /etc/letsencrypt/live/aitbc.keisanki.net/privkey.pem;
|
||||
include /etc/letsencrypt/options-ssl-nginx.conf;
|
||||
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
|
||||
|
||||
# Security headers
|
||||
add_header X-Frame-Options "SAMEORIGIN" always;
|
||||
add_header X-XSS-Protection "1; mode=block" always;
|
||||
add_header X-Content-Type-Options "nosniff" always;
|
||||
add_header Referrer-Policy "strict-origin-when-cross-origin" always;
|
||||
add_header Content-Security-Policy "default-src 'self' http: https: data: blob: 'unsafe-inline' 'unsafe-eval'" always;
|
||||
|
||||
# Enable gzip compression
|
||||
gzip on;
|
||||
gzip_vary on;
|
||||
gzip_min_length 1024;
|
||||
gzip_types text/plain text/css text/xml text/javascript application/javascript application/xml+rss application/json;
|
||||
|
||||
# Blockchain Explorer (main route)
|
||||
location / {
|
||||
proxy_pass http://192.168.100.10:3000;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
|
||||
# WebSocket support if needed
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
}
|
||||
|
||||
# Coordinator API
|
||||
location /api/ {
|
||||
proxy_pass http://192.168.100.10:8000/v1/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
|
||||
# CORS headers for API
|
||||
add_header Access-Control-Allow-Origin "*" always;
|
||||
add_header Access-Control-Allow-Methods "GET, POST, PUT, DELETE, OPTIONS" always;
|
||||
add_header Access-Control-Allow-Headers "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization,X-Api-Key" always;
|
||||
|
||||
# Handle preflight requests
|
||||
if ($request_method = 'OPTIONS') {
|
||||
add_header Access-Control-Allow-Origin "*";
|
||||
add_header Access-Control-Allow-Methods "GET, POST, PUT, DELETE, OPTIONS";
|
||||
add_header Access-Control-Allow-Headers "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization,X-Api-Key";
|
||||
add_header Access-Control-Max-Age 1728000;
|
||||
add_header Content-Type "text/plain; charset=utf-8";
|
||||
add_header Content-Length 0;
|
||||
return 204;
|
||||
}
|
||||
}
|
||||
|
||||
# Blockchain Node 1 RPC
|
||||
location /rpc/ {
|
||||
proxy_pass http://192.168.100.10:8082/rpc/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
}
|
||||
|
||||
# Blockchain Node 2 RPC (alternative endpoint)
|
||||
location /rpc2/ {
|
||||
proxy_pass http://192.168.100.10:8081/rpc/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
}
|
||||
|
||||
# Exchange API
|
||||
location /exchange/ {
|
||||
proxy_pass http://192.168.100.10:9080/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
}
|
||||
|
||||
# Marketplace UI (if separate from explorer)
|
||||
location /marketplace/ {
|
||||
proxy_pass http://192.168.100.10:3001/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
|
||||
# Handle subdirectory rewrite
|
||||
rewrite ^/marketplace/(.*)$ /$1 break;
|
||||
}
|
||||
|
||||
# Admin dashboard
|
||||
location /admin/ {
|
||||
proxy_pass http://192.168.100.10:8080/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
|
||||
# Optional: Restrict admin access
|
||||
# allow 192.168.100.0/24;
|
||||
# allow 127.0.0.1;
|
||||
# deny all;
|
||||
}
|
||||
|
||||
# Health check endpoint
|
||||
location /health {
|
||||
access_log off;
|
||||
return 200 "healthy\n";
|
||||
add_header Content-Type text/plain;
|
||||
}
|
||||
|
||||
# API health checks
|
||||
location /api/health {
|
||||
proxy_pass http://192.168.100.10:8000/v1/health;
|
||||
proxy_set_header Host $host;
|
||||
access_log off;
|
||||
}
|
||||
|
||||
# Static assets caching
|
||||
location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
|
||||
proxy_pass http://192.168.100.10:3000;
|
||||
expires 1y;
|
||||
add_header Cache-Control "public, immutable";
|
||||
add_header X-Content-Type-Options nosniff;
|
||||
|
||||
# Don't log static file access
|
||||
access_log off;
|
||||
}
|
||||
|
||||
# Deny access to hidden files
|
||||
location ~ /\. {
|
||||
deny all;
|
||||
access_log off;
|
||||
log_not_found off;
|
||||
}
|
||||
|
||||
# Custom error pages
|
||||
error_page 404 /404.html;
|
||||
error_page 500 502 503 504 /50x.html;
|
||||
|
||||
location = /50x.html {
|
||||
root /usr/share/nginx/html;
|
||||
}
|
||||
}
|
||||
|
||||
# Optional: Subdomain for API-only access
|
||||
server {
|
||||
listen 443 ssl http2;
|
||||
server_name api.aitbc.keisanki.net;
|
||||
|
||||
# SSL Configuration (same certificates)
|
||||
ssl_certificate /etc/letsencrypt/live/aitbc.keisanki.net/fullchain.pem;
|
||||
ssl_certificate_key /etc/letsencrypt/live/aitbc.keisanki.net/privkey.pem;
|
||||
include /etc/letsencrypt/options-ssl-nginx.conf;
|
||||
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
|
||||
|
||||
# Security headers
|
||||
add_header X-Frame-Options "DENY" always;
|
||||
add_header X-XSS-Protection "1; mode=block" always;
|
||||
add_header X-Content-Type-Options "nosniff" always;
|
||||
add_header Referrer-Policy "strict-origin-when-cross-origin" always;
|
||||
|
||||
# API routes only
|
||||
location / {
|
||||
proxy_pass http://192.168.100.10:8000/v1/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
|
||||
# CORS headers
|
||||
add_header Access-Control-Allow-Origin "*" always;
|
||||
add_header Access-Control-Allow-Methods "GET, POST, PUT, DELETE, OPTIONS" always;
|
||||
add_header Access-Control-Allow-Headers "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization,X-Api-Key" always;
|
||||
|
||||
# Handle preflight requests
|
||||
if ($request_method = 'OPTIONS') {
|
||||
add_header Access-Control-Allow-Origin "*";
|
||||
add_header Access-Control-Allow-Methods "GET, POST, PUT, DELETE, OPTIONS";
|
||||
add_header Access-Control-Allow-Headers "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization,X-Api-Key";
|
||||
add_header Access-Control-Max-Age 1728000;
|
||||
add_header Content-Type "text/plain; charset=utf-8";
|
||||
add_header Content-Length 0;
|
||||
return 204;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Optional: Subdomain for blockchain RPC
|
||||
server {
|
||||
listen 443 ssl http2;
|
||||
server_name rpc.aitbc.keisanki.net;
|
||||
|
||||
# SSL Configuration
|
||||
ssl_certificate /etc/letsencrypt/live/aitbc.keisanki.net/fullchain.pem;
|
||||
ssl_certificate_key /etc/letsencrypt/live/aitbc.keisanki.net/privkey.pem;
|
||||
include /etc/letsencrypt/options-ssl-nginx.conf;
|
||||
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
|
||||
|
||||
# Security headers
|
||||
add_header X-Frame-Options "DENY" always;
|
||||
add_header X-XSS-Protection "1; mode=block" always;
|
||||
add_header X-Content-Type-Options "nosniff" always;
|
||||
|
||||
# RPC routes
|
||||
location / {
|
||||
proxy_pass http://192.168.100.10:8082/rpc/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
}
|
||||
}
|
||||
@@ -1,133 +0,0 @@
|
||||
# AITBC Services Nginx Configuration
|
||||
# Domain: https://aitbc.bubuit.net
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
server_name aitbc.bubuit.net;
|
||||
|
||||
# Redirect to HTTPS
|
||||
return 301 https://$server_name$request_uri;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 443 ssl http2;
|
||||
server_name aitbc.bubuit.net;
|
||||
|
||||
# SSL Configuration (Let's Encrypt)
|
||||
ssl_certificate /etc/letsencrypt/live/aitbc.bubuit.net/fullchain.pem;
|
||||
ssl_certificate_key /etc/letsencrypt/live/aitbc.bubuit.net/privkey.pem;
|
||||
include /etc/letsencrypt/options-ssl-nginx.conf;
|
||||
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
|
||||
|
||||
# Security Headers
|
||||
add_header X-Frame-Options "SAMEORIGIN" always;
|
||||
add_header X-XSS-Protection "1; mode=block" always;
|
||||
add_header X-Content-Type-Options "nosniff" always;
|
||||
add_header Referrer-Policy "no-referrer-when-downgrade" always;
|
||||
add_header Content-Security-Policy "default-src 'self' http: https: data: blob: 'unsafe-inline'" always;
|
||||
|
||||
# API Routes
|
||||
location /api/ {
|
||||
proxy_pass http://127.0.0.1:8000/v1/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
}
|
||||
|
||||
# Blockchain RPC Routes
|
||||
location /rpc/ {
|
||||
proxy_pass http://127.0.0.1:9080/rpc/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
}
|
||||
|
||||
# Marketplace UI
|
||||
location /Marketplace {
|
||||
proxy_pass http://127.0.0.1:3001/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
# Handle subdirectory
|
||||
rewrite ^/Marketplace/(.*)$ /$1 break;
|
||||
proxy_buffering off;
|
||||
}
|
||||
|
||||
# Trade Exchange
|
||||
location /Exchange {
|
||||
proxy_pass http://127.0.0.1:3002/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
# Handle subdirectory
|
||||
rewrite ^/Exchange/(.*)$ /$1 break;
|
||||
proxy_buffering off;
|
||||
}
|
||||
|
||||
# Exchange API Routes
|
||||
location /api/trades/ {
|
||||
proxy_pass http://127.0.0.1:3003/api/trades/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
}
|
||||
|
||||
location /api/orders {
|
||||
proxy_pass http://127.0.0.1:3003/api/orders;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
}
|
||||
|
||||
# Wallet CLI API (if needed)
|
||||
location /wallet/ {
|
||||
proxy_pass http://127.0.0.1:8000/wallet/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
# Admin routes
|
||||
location /admin/ {
|
||||
proxy_pass http://127.0.0.1:8000/admin/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
# Restrict access (optional)
|
||||
# allow 127.0.0.1;
|
||||
# allow 10.1.223.0/24;
|
||||
# deny all;
|
||||
}
|
||||
|
||||
# Health check
|
||||
location /health {
|
||||
proxy_pass http://127.0.0.1:8000/v1/health;
|
||||
proxy_set_header Host $host;
|
||||
}
|
||||
|
||||
# Default redirect to Marketplace
|
||||
location / {
|
||||
return 301 /Marketplace;
|
||||
}
|
||||
|
||||
# Static file caching
|
||||
location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg)$ {
|
||||
expires 1y;
|
||||
add_header Cache-Control "public, immutable";
|
||||
}
|
||||
}
|
||||
@@ -1,55 +1,74 @@
|
||||
# AITBC Services Nginx Configuration
|
||||
# Copy to nginx-aitbc.conf and replace YOUR_DOMAIN with your actual domain
|
||||
# Adjust ports if your services run on different ones
|
||||
# Adjust BACKEND_IP if running in containers (default: 127.0.0.1 for localhost)
|
||||
# For Incus containers, set BACKEND_IP to the container IP address
|
||||
# You can use sed to replace: sed 's/127.0.0.1/YOUR_CONTAINER_IP/g' nginx-aitbc.conf.example > nginx-aitbc.conf
|
||||
#
|
||||
# After deployment, run: certbot --nginx -d YOUR_DOMAIN to enable SSL
|
||||
|
||||
# Backend IP address - set to container IP if running in Incus/LXD
|
||||
map $host $backend_ip {
|
||||
default 127.0.0.1;
|
||||
# Uncomment and set your container IP below:
|
||||
# host.example.com 10.0.0.100;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
server_name YOUR_DOMAIN;
|
||||
|
||||
# Redirect to HTTPS
|
||||
return 301 https://$server_name$request_uri;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 443 ssl http2;
|
||||
server_name YOUR_DOMAIN;
|
||||
|
||||
# SSL Configuration (Let's Encrypt)
|
||||
ssl_certificate /etc/letsencrypt/live/YOUR_DOMAIN/fullchain.pem;
|
||||
ssl_certificate_key /etc/letsencrypt/live/YOUR_DOMAIN/privkey.pem;
|
||||
include /etc/letsencrypt/options-ssl-nginx.conf;
|
||||
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
|
||||
|
||||
# Security Headers
|
||||
add_header X-Frame-Options "SAMEORIGIN" always;
|
||||
add_header X-XSS-Protection "1; mode=block" always;
|
||||
add_header X-Content-Type-Options "nosniff" always;
|
||||
add_header Referrer-Policy "no-referrer-when-downgrade" always;
|
||||
add_header Content-Security-Policy "default-src 'self' http: https: data: blob: 'unsafe-inline'" always;
|
||||
|
||||
|
||||
# Enable gzip compression
|
||||
gzip on;
|
||||
gzip_vary on;
|
||||
gzip_min_length 1024;
|
||||
gzip_types text/plain text/css text/xml text/javascript application/javascript application/xml+rss application/json;
|
||||
|
||||
# Coordinator API
|
||||
location /api/ {
|
||||
proxy_pass http://127.0.0.1:8000/v1/;
|
||||
proxy_pass http://$backend_ip:8000/v1/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
|
||||
# CORS headers for API
|
||||
add_header Access-Control-Allow-Origin "*" always;
|
||||
add_header Access-Control-Allow-Methods "GET, POST, PUT, DELETE, OPTIONS" always;
|
||||
add_header Access-Control-Allow-Headers "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization,X-Api-Key" always;
|
||||
|
||||
# Handle preflight requests
|
||||
if ($request_method = 'OPTIONS') {
|
||||
add_header Access-Control-Allow-Origin "*";
|
||||
add_header Access-Control-Allow-Methods "GET, POST, PUT, DELETE, OPTIONS";
|
||||
add_header Access-Control-Allow-Headers "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization,X-Api-Key";
|
||||
add_header Access-Control-Max-Age 1728000;
|
||||
add_header Content-Type "text/plain; charset=utf-8";
|
||||
add_header Content-Length 0;
|
||||
return 204;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
# Blockchain RPC
|
||||
location /rpc/ {
|
||||
proxy_pass http://127.0.0.1:9080/rpc/;
|
||||
proxy_pass http://$backend_ip:8006/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
}
|
||||
|
||||
|
||||
# Marketplace UI
|
||||
location /Marketplace {
|
||||
proxy_pass http://127.0.0.1:3001/;
|
||||
proxy_pass http://$backend_ip:8007/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
@@ -57,10 +76,10 @@ server {
|
||||
rewrite ^/Marketplace/(.*)$ /$1 break;
|
||||
proxy_buffering off;
|
||||
}
|
||||
|
||||
|
||||
# Trade Exchange
|
||||
location /Exchange {
|
||||
proxy_pass http://127.0.0.1:3002/;
|
||||
proxy_pass http://$backend_ip:8008/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
@@ -68,32 +87,103 @@ server {
|
||||
rewrite ^/Exchange/(.*)$ /$1 break;
|
||||
proxy_buffering off;
|
||||
}
|
||||
|
||||
|
||||
# Exchange API Routes
|
||||
location /api/trades/ {
|
||||
proxy_pass http://$backend_ip:8008/api/trades/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
}
|
||||
|
||||
location /api/orders {
|
||||
proxy_pass http://$backend_ip:8008/api/orders;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_buffering off;
|
||||
}
|
||||
|
||||
# Wallet API
|
||||
location /wallet/ {
|
||||
proxy_pass http://127.0.0.1:8000/wallet/;
|
||||
proxy_pass http://$backend_ip:8003/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
|
||||
# Health check
|
||||
location /health {
|
||||
proxy_pass http://127.0.0.1:8000/v1/health;
|
||||
proxy_pass http://$backend_ip:8000/v1/health;
|
||||
proxy_set_header Host $host;
|
||||
}
|
||||
|
||||
|
||||
# Default — serve static website
|
||||
location / {
|
||||
root /var/www/html;
|
||||
index index.html;
|
||||
try_files $uri $uri/ =404;
|
||||
|
||||
# WebSocket support if needed
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
}
|
||||
|
||||
|
||||
# Deny access to hidden files
|
||||
location ~ /\. {
|
||||
deny all;
|
||||
access_log off;
|
||||
log_not_found off;
|
||||
}
|
||||
|
||||
# Custom error pages
|
||||
error_page 404 /404.html;
|
||||
error_page 500 502 503 504 /50x.html;
|
||||
|
||||
location = /50x.html {
|
||||
root /usr/share/nginx/html;
|
||||
}
|
||||
|
||||
# Static file caching
|
||||
location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg)$ {
|
||||
expires 1y;
|
||||
add_header Cache-Control "public, immutable";
|
||||
}
|
||||
|
||||
# Serve production assets
|
||||
location /assets/ {
|
||||
alias /var/www/html/assets/;
|
||||
expires 1y;
|
||||
add_header Cache-Control "public, immutable";
|
||||
add_header X-Content-Type-Options nosniff;
|
||||
}
|
||||
|
||||
# Font serving
|
||||
location /fonts-font-awesome/ {
|
||||
alias /usr/share/fonts-font-awesome/;
|
||||
expires 1y;
|
||||
add_header Cache-Control "public, immutable";
|
||||
}
|
||||
|
||||
# Admin endpoints
|
||||
location = /api/treasury-balance {
|
||||
proxy_pass http://$backend_ip:8085;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location = /api/exchange/wallet/info {
|
||||
proxy_pass http://$backend_ip:8085;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,18 +0,0 @@
|
||||
# Add to /etc/nginx/sites-available/aitbc.conf
|
||||
|
||||
# Serve production assets
|
||||
location /assets/ {
|
||||
alias /var/www/html/assets/;
|
||||
expires 1y;
|
||||
add_header Cache-Control "public, immutable";
|
||||
add_header X-Content-Type-Options nosniff;
|
||||
|
||||
# Gzip compression
|
||||
gzip on;
|
||||
gzip_types text/css application/javascript image/svg+xml;
|
||||
}
|
||||
|
||||
# Security headers
|
||||
add_header Referrer-Policy "strict-origin-when-cross-origin" always;
|
||||
add_header X-Frame-Options "SAMEORIGIN" always;
|
||||
add_header X-Content-Type-Options "nosniff" always;
|
||||
@@ -1,85 +0,0 @@
|
||||
# Geographic Load Balancing Nginx Configuration
|
||||
# Distributes traffic to the closest regional endpoint based on the client's IP
|
||||
|
||||
# Ensure Nginx is compiled with the GeoIP module:
|
||||
# nginx -V 2>&1 | grep -- --with-http_geoip_module
|
||||
|
||||
# Define the GeoIP database location
|
||||
geoip_country /usr/share/GeoIP/GeoIP.dat;
|
||||
geoip_city /usr/share/GeoIP/GeoIPCity.dat;
|
||||
|
||||
# Map the continent code to an upstream backend
|
||||
map $geoip_city_continent_code $closest_region {
|
||||
default us_east_backend; # Default fallback
|
||||
|
||||
# North America
|
||||
NA us_east_backend;
|
||||
|
||||
# Europe
|
||||
EU eu_central_backend;
|
||||
|
||||
# Asia
|
||||
AS ap_northeast_backend;
|
||||
|
||||
# Oceania, Africa, South America could map to the nearest available
|
||||
OC ap_northeast_backend;
|
||||
AF eu_central_backend;
|
||||
SA us_east_backend;
|
||||
}
|
||||
|
||||
# Define the upstream backends for each region
|
||||
upstream us_east_backend {
|
||||
# US East instances
|
||||
server 10.1.0.100:8000 max_fails=3 fail_timeout=30s;
|
||||
server 10.1.0.101:8000 max_fails=3 fail_timeout=30s backup;
|
||||
keepalive 32;
|
||||
}
|
||||
|
||||
upstream eu_central_backend {
|
||||
# EU Central instances
|
||||
server 10.2.0.100:8000 max_fails=3 fail_timeout=30s;
|
||||
server 10.2.0.101:8000 max_fails=3 fail_timeout=30s backup;
|
||||
keepalive 32;
|
||||
}
|
||||
|
||||
upstream ap_northeast_backend {
|
||||
# AP Northeast instances
|
||||
server 10.3.0.100:8000 max_fails=3 fail_timeout=30s;
|
||||
server 10.3.0.101:8000 max_fails=3 fail_timeout=30s backup;
|
||||
keepalive 32;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
listen 443 ssl http2;
|
||||
server_name api.aitbc.dev;
|
||||
|
||||
# SSL configuration (omitted for brevity, assume Let's Encrypt managed)
|
||||
# ssl_certificate /etc/letsencrypt/live/api.aitbc.dev/fullchain.pem;
|
||||
# ssl_certificate_key /etc/letsencrypt/live/api.aitbc.dev/privkey.pem;
|
||||
|
||||
# Add headers to indicate routing decisions for debugging
|
||||
add_header X-Region-Routed $closest_region always;
|
||||
add_header X-Client-Continent $geoip_city_continent_code always;
|
||||
|
||||
location / {
|
||||
# Proxy traffic to the mapped upstream region
|
||||
proxy_pass http://$closest_region;
|
||||
|
||||
# Standard proxy headers
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
# Enable keepalive
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Connection "";
|
||||
}
|
||||
|
||||
# Health check endpoint for external load balancers/monitors
|
||||
location /health {
|
||||
access_log off;
|
||||
return 200 "OK\n";
|
||||
}
|
||||
}
|
||||
@@ -1,63 +0,0 @@
|
||||
# Local nginx configuration for AITBC domain testing
|
||||
# Save as /etc/nginx/sites-available/aitbc-local
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
server_name aitbc.bubuit.net localhost;
|
||||
|
||||
# API routes
|
||||
location /api/ {
|
||||
proxy_pass http://127.0.0.1:8000/v1/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
# Admin routes
|
||||
location /admin/ {
|
||||
proxy_pass http://127.0.0.1:8000/admin/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
# Blockchain RPC
|
||||
location /rpc/ {
|
||||
proxy_pass http://127.0.0.1:9080/rpc/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
# Marketplace UI
|
||||
location /Marketplace {
|
||||
proxy_pass http://127.0.0.1:3001/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
# Trade Exchange
|
||||
location /Exchange {
|
||||
proxy_pass http://127.0.0.1:3002/;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
# Health endpoint
|
||||
location /health {
|
||||
proxy_pass http://127.0.0.1:8000/v1/health;
|
||||
proxy_set_header Host $host;
|
||||
}
|
||||
|
||||
# Default redirect
|
||||
location / {
|
||||
return 301 /Marketplace;
|
||||
}
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
location = /api/treasury-balance {
|
||||
proxy_pass http://127.0.0.1:8085;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location = /api/exchange/wallet/info {
|
||||
proxy_pass http://127.0.0.1:8085;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
location /fonts-font-awesome/ {
|
||||
alias /usr/share/fonts-font-awesome/;
|
||||
expires 1y;
|
||||
add_header Cache-Control "public, immutable";
|
||||
}
|
||||
@@ -29,10 +29,9 @@ echo ""
|
||||
echo "🔧 Core Services (8000-8009):"
|
||||
check_service "Coordinator API" "http://localhost:8000/health"
|
||||
check_service "Exchange API" "http://localhost:8001/api/health"
|
||||
check_service "Marketplace API" "http://localhost:8002/health"
|
||||
check_service "Marketplace API" "http://localhost:8007/health"
|
||||
check_service "Wallet API" "http://localhost:8003/health"
|
||||
check_service "Explorer" "http://localhost:8004/health"
|
||||
check_service "Web UI" "http://localhost:8007/health"
|
||||
|
||||
# Check blockchain node and RPC
|
||||
echo ""
|
||||
|
||||
@@ -109,9 +109,9 @@ class P2PDiscovery:
|
||||
"""Add bootstrap node for initial connection"""
|
||||
self.bootstrap_nodes.append((address, port))
|
||||
|
||||
def generate_node_id(self, address: str, port: int, public_key: str) -> str:
|
||||
"""Generate unique node ID from address, port, and public key"""
|
||||
content = f"{address}:{port}:{public_key}"
|
||||
def generate_node_id(self, hostname: str, address: str, port: int, public_key: str) -> str:
|
||||
"""Generate unique node ID from hostname, address, port, and public key"""
|
||||
content = f"{hostname}:{address}:{port}:{public_key}"
|
||||
return hashlib.sha256(content.encode()).hexdigest()
|
||||
|
||||
async def start_discovery(self):
|
||||
@@ -2343,17 +2343,18 @@ class TestP2PDiscovery:
|
||||
|
||||
def test_generate_node_id(self):
|
||||
"""Test node ID generation"""
|
||||
hostname = "node1.example.com"
|
||||
address = "127.0.0.1"
|
||||
port = 8000
|
||||
public_key = "test_public_key"
|
||||
|
||||
node_id = self.discovery.generate_node_id(address, port, public_key)
|
||||
|
||||
|
||||
node_id = self.discovery.generate_node_id(hostname, address, port, public_key)
|
||||
|
||||
assert isinstance(node_id, str)
|
||||
assert len(node_id) == 64 # SHA256 hex length
|
||||
|
||||
|
||||
# Test consistency
|
||||
node_id2 = self.discovery.generate_node_id(address, port, public_key)
|
||||
node_id2 = self.discovery.generate_node_id(hostname, address, port, public_key)
|
||||
assert node_id == node_id2
|
||||
|
||||
def test_add_bootstrap_node(self):
|
||||
@@ -2367,17 +2368,18 @@ class TestP2PDiscovery:
|
||||
|
||||
def test_generate_node_id_consistency(self):
|
||||
"""Test node ID generation consistency"""
|
||||
hostname = "node2.example.com"
|
||||
address = "192.168.1.1"
|
||||
port = 9000
|
||||
public_key = "test_key"
|
||||
|
||||
node_id1 = self.discovery.generate_node_id(address, port, public_key)
|
||||
node_id2 = self.discovery.generate_node_id(address, port, public_key)
|
||||
|
||||
|
||||
node_id1 = self.discovery.generate_node_id(hostname, address, port, public_key)
|
||||
node_id2 = self.discovery.generate_node_id(hostname, address, port, public_key)
|
||||
|
||||
assert node_id1 == node_id2
|
||||
|
||||
|
||||
# Different inputs should produce different IDs
|
||||
node_id3 = self.discovery.generate_node_id("192.168.1.2", port, public_key)
|
||||
node_id3 = self.discovery.generate_node_id(hostname, "192.168.1.2", port, public_key)
|
||||
assert node_id1 != node_id3
|
||||
|
||||
def test_get_peer_count_empty(self):
|
||||
|
||||
@@ -1,61 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Simple FastAPI service for AITBC Web UI (Port 8016)
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
sys.path.insert(0, '/opt/aitbc/apps/coordinator-api/src')
|
||||
|
||||
import uvicorn
|
||||
from fastapi import FastAPI
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from fastapi.responses import HTMLResponse
|
||||
|
||||
app = FastAPI(title='AITBC Web UI Service', version='1.0.0')
|
||||
|
||||
@app.get('/health')
|
||||
def health():
|
||||
return {
|
||||
'status': 'ok',
|
||||
'service': 'web-ui',
|
||||
'port': 8016,
|
||||
'python_version': sys.version.split()[0]
|
||||
}
|
||||
|
||||
@app.get('/')
|
||||
def root():
|
||||
return HTMLResponse("""
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>AITBC Web UI</title>
|
||||
<style>
|
||||
body { font-family: Arial, sans-serif; margin: 40px; }
|
||||
.container { max-width: 800px; margin: 0 auto; }
|
||||
.header { text-align: center; color: #333; }
|
||||
.status { background: #e8f5e8; padding: 20px; border-radius: 5px; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="container">
|
||||
<div class="header">
|
||||
<h1>🚀 AITBC Web UI</h1>
|
||||
<p>Port 8016 - Enhanced Services Interface</p>
|
||||
</div>
|
||||
<div class="status">
|
||||
<h2>🎯 Service Status</h2>
|
||||
<p>✅ Web UI: Running on port 8016</p>
|
||||
<p>✅ Coordinator API: Running on port 8000</p>
|
||||
<p>✅ Exchange API: Running on port 8001</p>
|
||||
<p>✅ Blockchain RPC: Running on port 8003</p>
|
||||
<p>✅ Enhanced Services: Running on ports 8010-8016</p>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
""")
|
||||
|
||||
if __name__ == '__main__':
|
||||
port = int(os.environ.get('PORT', 8016))
|
||||
uvicorn.run(app, host='0.0.0.0', port=port)
|
||||
@@ -228,10 +228,9 @@ echo ""
|
||||
echo "🔧 Core Services (8000-8009):"
|
||||
check_service "Coordinator API" "http://localhost:8000/health"
|
||||
check_service "Exchange API" "http://localhost:8001/api/health"
|
||||
check_service "Marketplace API" "http://localhost:8002/health"
|
||||
check_service "Marketplace API" "http://localhost:8007/health"
|
||||
check_service "Wallet API" "http://localhost:8003/health"
|
||||
check_service "Explorer" "http://localhost:8004/health"
|
||||
check_service "Web UI" "http://localhost:8007/health"
|
||||
|
||||
# Check blockchain node and RPC
|
||||
echo ""
|
||||
@@ -282,12 +281,12 @@ start_services() {
|
||||
log "Starting AITBC services..."
|
||||
|
||||
# Try systemd first
|
||||
if systemctl start aitbc-wallet aitbc-coordinator-api aitbc-exchange-api aitbc-blockchain-node aitbc-blockchain-rpc aitbc-gpu aitbc-marketplace aitbc-openclaw aitbc-ai aitbc-learning aitbc-explorer aitbc-web-ui aitbc-agent-coordinator aitbc-agent-registry aitbc-multimodal aitbc-modality-optimization 2>/dev/null; then
|
||||
if systemctl start aitbc-wallet aitbc-coordinator-api aitbc-exchange-api aitbc-blockchain-node aitbc-blockchain-rpc aitbc-gpu aitbc-marketplace aitbc-openclaw aitbc-ai aitbc-learning aitbc-explorer aitbc-agent-coordinator aitbc-agent-registry aitbc-multimodal aitbc-modality-optimization 2>/dev/null; then
|
||||
log "Services started via systemd"
|
||||
sleep 5
|
||||
|
||||
|
||||
# Check if services are running
|
||||
if systemctl is-active --quiet aitbc-wallet aitbc-coordinator-api aitbc-exchange-api aitbc-blockchain-node aitbc-blockchain-rpc aitbc-gpu aitbc-marketplace aitbc-openclaw aitbc-ai aitbc-learning aitbc-explorer aitbc-web-ui aitbc-agent-coordinator aitbc-agent-registry aitbc-multimodal aitbc-modality-optimization; then
|
||||
if systemctl is-active --quiet aitbc-wallet aitbc-coordinator-api aitbc-exchange-api aitbc-blockchain-node aitbc-blockchain-rpc aitbc-gpu aitbc-marketplace aitbc-openclaw aitbc-ai aitbc-learning aitbc-explorer aitbc-agent-coordinator aitbc-agent-registry aitbc-multimodal aitbc-modality-optimization; then
|
||||
success "Services started successfully via systemd"
|
||||
else
|
||||
warning "Some systemd services failed, falling back to manual startup"
|
||||
|
||||
@@ -17,12 +17,11 @@ echo "Multimodal GPU (8010): $(curl -s http://localhost:8010/health | jq -r .sta
|
||||
echo "GPU Multimodal (8011): $(curl -s http://localhost:8011/health | jq -r .status 2>/dev/null || echo 'FAIL')"
|
||||
echo "Modality Optimization (8012): $(curl -s http://localhost:8012/health | jq -r .status 2>/dev/null || echo 'FAIL')"
|
||||
echo "Adaptive Learning (8013): $(curl -s http://localhost:8013/health | jq -r .status 2>/dev/null || echo 'FAIL')"
|
||||
echo "Web UI (8016): $(curl -s http://localhost:8016/health | jq -r .status 2>/dev/null || echo 'FAIL')"
|
||||
echo "Geographic Load Balancer (8017): $(curl -s http://localhost:8017/health | jq -r .status 2>/dev/null || echo 'FAIL')"
|
||||
|
||||
echo ""
|
||||
echo "📊 Port Usage:"
|
||||
sudo netstat -tlnp | grep -E ":(8000|8001|8003|8010|8011|8012|8013|8016|8017)" | sort
|
||||
sudo netstat -tlnp | grep -E ":(8000|8001|8003|8010|8011|8012|8013|8017)" | sort
|
||||
|
||||
echo ""
|
||||
echo "✅ All services tested!"
|
||||
|
||||
@@ -6,7 +6,7 @@ set -euo pipefail
|
||||
|
||||
echo "=== 🧪 AITBC Comprehensive Services Test ==="
|
||||
echo "Date: $(date)"
|
||||
echo "Testing all services with new port logic (8000-8003, 8010-8016)"
|
||||
echo "Testing all services with new port logic (8000-8003, 8010-8015)"
|
||||
echo ""
|
||||
|
||||
# Colors for output
|
||||
@@ -82,7 +82,6 @@ test_service "Multimodal GPU (8010)" "http://localhost:8010/health" '"service":"
|
||||
test_service "GPU Multimodal (8011)" "http://localhost:8011/health" '"service":"gpu-multimodal"'
|
||||
test_service "Modality Optimization (8012)" "http://localhost:8012/health" '"service":"modality-optimization"'
|
||||
test_service "Adaptive Learning (8013)" "http://localhost:8013/health" '"service":"adaptive-learning"'
|
||||
test_service "Web UI (8016)" "http://localhost:8016/health" '"service":"web-ui"'
|
||||
|
||||
echo ""
|
||||
echo "🔧 Service Features Testing"
|
||||
@@ -106,7 +105,6 @@ test_port "8010" "Multimodal GPU"
|
||||
test_port "8011" "GPU Multimodal"
|
||||
test_port "8012" "Modality Optimization"
|
||||
test_port "8013" "Adaptive Learning"
|
||||
test_port "8016" "Web UI"
|
||||
|
||||
echo ""
|
||||
echo "📊 Test Results Summary"
|
||||
|
||||
@@ -143,7 +143,7 @@ check_network() {
|
||||
echo -e "\n📋 Checking Network Requirements..."
|
||||
|
||||
# Check if required ports are available
|
||||
REQUIRED_PORTS=(8000 8001 8002 8003 8010 8011 8012 8013 8014 8015 8016)
|
||||
REQUIRED_PORTS=(8000 8001 8003 8007 8008 8010 8011 8012 8013 8014 8015)
|
||||
OCCUPIED_PORTS=()
|
||||
|
||||
for port in "${REQUIRED_PORTS[@]}"; do
|
||||
|
||||
@@ -1,129 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
AITBC Blockchain Follower Node - Port 8007
|
||||
Follows the main blockchain node and provides follower API endpoints
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import asyncio
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
# Set environment variables
|
||||
os.environ.setdefault('PYTHONPATH', '/opt/aitbc/apps/blockchain-node/src:/opt/aitbc/services')
|
||||
os.environ.setdefault('BLOCKCHAIN_DATA_DIR', '/var/lib/aitbc/data/follower')
|
||||
os.environ.setdefault('BLOCKCHAIN_CONFIG_DIR', '/etc/aitbc')
|
||||
os.environ.setdefault('BLOCKCHAIN_LOG_DIR', '/var/log/aitbc/production')
|
||||
os.environ.setdefault('BLOCKCHAIN_PORT', '8007')
|
||||
os.environ.setdefault('BLOCKCHAIN_ROLE', 'follower')
|
||||
|
||||
# Add paths
|
||||
sys.path.insert(0, '/opt/aitbc/apps/blockchain-node/src')
|
||||
sys.path.insert(0, '/opt/aitbc/services')
|
||||
|
||||
try:
|
||||
import uvicorn
|
||||
from fastapi import FastAPI, HTTPException
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
# Create follower FastAPI app
|
||||
app = FastAPI(
|
||||
title="AITBC Blockchain Follower Node",
|
||||
description="Follower node for AITBC blockchain network",
|
||||
version="v0.3.0",
|
||||
docs_url="/docs",
|
||||
redoc_url="/redoc"
|
||||
)
|
||||
|
||||
# Basic endpoints
|
||||
@app.get("/")
|
||||
async def root():
|
||||
return {
|
||||
"status": "follower_node",
|
||||
"port": 8007,
|
||||
"role": "follower",
|
||||
"service": "aitbc-blockchain-follower",
|
||||
"version": "v0.3.0"
|
||||
}
|
||||
|
||||
@app.get("/health")
|
||||
async def health():
|
||||
return {
|
||||
"status": "healthy",
|
||||
"service": "follower-node",
|
||||
"port": 8007,
|
||||
"role": "follower"
|
||||
}
|
||||
|
||||
@app.get("/status")
|
||||
async def status():
|
||||
return {
|
||||
"status": "active",
|
||||
"node_type": "follower",
|
||||
"port": 8007,
|
||||
"following": "http://localhost:8006"
|
||||
}
|
||||
|
||||
if __name__ == "__main__":
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
logger.info("Starting AITBC Blockchain Follower Node on port 8007")
|
||||
logger.info("Following main node at http://localhost:8006")
|
||||
|
||||
# Start server on port 8007
|
||||
uvicorn.run(
|
||||
app,
|
||||
host="0.0.0.0",
|
||||
port=8007,
|
||||
log_level="info"
|
||||
)
|
||||
|
||||
except ImportError as e:
|
||||
print(f"Import error: {e}")
|
||||
print("Creating minimal follower node...")
|
||||
|
||||
# Fallback simple server
|
||||
from http.server import HTTPServer, BaseHTTPRequestHandler
|
||||
import json
|
||||
|
||||
class FollowerHandler(BaseHTTPRequestHandler):
|
||||
def do_GET(self):
|
||||
if self.path == "/":
|
||||
response = {
|
||||
"status": "follower_node",
|
||||
"port": 8007,
|
||||
"role": "follower",
|
||||
"service": "aitbc-blockchain-follower",
|
||||
"version": "v0.3.0"
|
||||
}
|
||||
elif self.path == "/health":
|
||||
response = {
|
||||
"status": "healthy",
|
||||
"service": "follower-node",
|
||||
"port": 8007,
|
||||
"role": "follower"
|
||||
}
|
||||
elif self.path == "/status":
|
||||
response = {
|
||||
"status": "active",
|
||||
"node_type": "follower",
|
||||
"port": 8007,
|
||||
"following": "http://localhost:8006"
|
||||
}
|
||||
else:
|
||||
response = {"error": "Not found"}
|
||||
|
||||
self.send_response(200)
|
||||
self.send_header('Content-type', 'application/json')
|
||||
self.end_headers()
|
||||
self.wfile.write(json.dumps(response).encode())
|
||||
|
||||
def log_message(self, format, *args):
|
||||
pass # Suppress logging
|
||||
|
||||
if __name__ == "__main__":
|
||||
print("Starting minimal follower node on port 8007")
|
||||
server = HTTPServer(('0.0.0.0', 8007), FollowerHandler)
|
||||
server.serve_forever()
|
||||
@@ -31,7 +31,7 @@ def main():
|
||||
|
||||
# Run the marketplace service
|
||||
import uvicorn
|
||||
uvicorn.run(app, host="0.0.0.0", port=8002)
|
||||
uvicorn.run(app, host="0.0.0.0", port=8007)
|
||||
|
||||
except ImportError as e:
|
||||
logger.error(f"Failed to import marketplace app: {e}")
|
||||
@@ -43,7 +43,7 @@ def main():
|
||||
logger.info("Successfully imported unified marketplace app")
|
||||
|
||||
import uvicorn
|
||||
uvicorn.run(app, host="0.0.0.0", port=8002)
|
||||
uvicorn.run(app, host="0.0.0.0", port=8007)
|
||||
|
||||
except ImportError as e2:
|
||||
logger.error(f"Failed to import unified marketplace: {e2}")
|
||||
@@ -73,8 +73,8 @@ def heartbeat_service():
|
||||
async def root():
|
||||
return {"service": "marketplace", "status": "running", "endpoints": ["/health", "/"]}
|
||||
|
||||
logger.info("Starting simple marketplace API on port 8002")
|
||||
uvicorn.run(app, host="0.0.0.0", port=8002)
|
||||
logger.info("Starting simple marketplace API on port 8007")
|
||||
uvicorn.run(app, host="0.0.0.0", port=8007)
|
||||
|
||||
except ImportError:
|
||||
# Fallback to simple heartbeat
|
||||
|
||||
@@ -10,7 +10,7 @@ Group=root
|
||||
WorkingDirectory=/opt/aitbc
|
||||
Environment=PATH=/usr/bin:/usr/local/bin:/usr/bin:/bin
|
||||
Environment=NODE_ID=aitbc
|
||||
Environment=MARKETPLACE_PORT=8002
|
||||
Environment=MARKETPLACE_PORT=8007
|
||||
Environment=WORKERS=1
|
||||
Environment=PYTHONPATH=/opt/aitbc/services
|
||||
EnvironmentFile=/etc/aitbc/production.env
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
[Unit]
|
||||
Description=AITBC Web UI Service (Port 8016)
|
||||
Documentation=https://docs.aitbc.bubuit.net
|
||||
After=network.target aitbc-coordinator-api.service
|
||||
Wants=aitbc-coordinator-api.service
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=aitbc
|
||||
Group=aitbc
|
||||
WorkingDirectory=/opt/aitbc/apps/blockchain-explorer
|
||||
Environment=PATH=/opt/aitbc/venv/bin:/usr/bin
|
||||
Environment=PYTHONPATH=/opt/aitbc/apps/blockchain-explorer
|
||||
Environment=PORT=8016
|
||||
Environment=SERVICE_TYPE=web-ui
|
||||
Environment=LOG_LEVEL=INFO
|
||||
ExecStart=/opt/aitbc/venv/bin/python -m http.server 8016 --bind 127.0.0.1
|
||||
ExecReload=/bin/kill -HUP $MAINPID
|
||||
Restart=always
|
||||
RestartSec=10
|
||||
StandardOutput=journal
|
||||
StandardError=journal
|
||||
SyslogIdentifier=aitbc-web-ui
|
||||
|
||||
# Security settings
|
||||
NoNewPrivileges=true
|
||||
PrivateTmp=true
|
||||
ProtectSystem=strict
|
||||
ProtectHome=true
|
||||
ReadWritePaths=/var/log/aitbc /var/lib/aitbc/data /opt/aitbc/venv
|
||||
LimitNOFILE=65536
|
||||
|
||||
# Resource limits
|
||||
MemoryMax=1G
|
||||
CPUQuota=100%
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
@@ -266,8 +266,9 @@ class TestPhase2NetworkInfrastructure:
|
||||
port = 8000
|
||||
public_key = "test_public_key"
|
||||
|
||||
node_id1 = p2p_discovery.generate_node_id(address, port, public_key)
|
||||
node_id2 = p2p_discovery.generate_node_id(address, port, public_key)
|
||||
hostname = "node.example.com"
|
||||
node_id1 = p2p_discovery.generate_node_id(hostname, address, port, public_key)
|
||||
node_id2 = p2p_discovery.generate_node_id(hostname, address, port, public_key)
|
||||
|
||||
assert node_id1 == node_id2 # Same inputs should generate same ID
|
||||
assert len(node_id1) == 64 # SHA256 hex length
|
||||
|
||||
Reference in New Issue
Block a user