network: add hub registration, Redis persistence, and federated mesh join protocol
Some checks failed
CLI Tests / test-cli (push) Has been cancelled
Integration Tests / test-service-integration (push) Has been cancelled
Python Tests / test-python (push) Has been cancelled
Security Scanning / security-scan (push) Has been cancelled
Documentation Validation / validate-docs (push) Has been cancelled
API Endpoint Tests / test-api-endpoints (push) Has been cancelled
Systemd Sync / sync-systemd (push) Has been cancelled
Some checks failed
CLI Tests / test-cli (push) Has been cancelled
Integration Tests / test-service-integration (push) Has been cancelled
Python Tests / test-python (push) Has been cancelled
Security Scanning / security-scan (push) Has been cancelled
Documentation Validation / validate-docs (push) Has been cancelled
API Endpoint Tests / test-api-endpoints (push) Has been cancelled
Systemd Sync / sync-systemd (push) Has been cancelled
- Change default P2P port from 7070 to 8001 in config and .env.example - Add redis_url configuration option for hub persistence (default: redis://localhost:6379) - Implement DNS-based hub registration/unregistration via HTTPS API endpoints - Add Redis persistence for hub registrations with 1-hour TTL - Add island join request/response protocol with member list and blockchain credentials - Add GPU marketplace tracking (offers, bids, providers) in hub manager - Add exchange order tracking (orders, per-pair order books) in hub manager
This commit is contained in:
@@ -26,11 +26,11 @@ class ChainSettings(BaseSettings):
|
||||
supported_chains: str = "ait-devnet" # Comma-separated list of supported chain IDs
|
||||
db_path: Path = Path("/var/lib/aitbc/data/chain.db")
|
||||
|
||||
rpc_bind_host: str = "127.0.0.1"
|
||||
rpc_bind_host: str = "0.0.0.0"
|
||||
rpc_bind_port: int = 8080
|
||||
|
||||
p2p_bind_host: str = "127.0.0.2"
|
||||
p2p_bind_port: int = 7070
|
||||
p2p_bind_host: str = "0.0.0.0"
|
||||
p2p_bind_port: int = 8001
|
||||
|
||||
proposer_id: str = ""
|
||||
proposer_key: Optional[str] = None
|
||||
@@ -85,6 +85,9 @@ class ChainSettings(BaseSettings):
|
||||
hub_discovery_url: str = "hub.aitbc.bubuit.net" # Hub discovery DNS
|
||||
bridge_islands: str = "" # Comma-separated list of islands to bridge (optional)
|
||||
|
||||
# Redis Configuration (Hub persistence)
|
||||
redis_url: str = "redis://localhost:6379" # Redis connection URL
|
||||
|
||||
# Keystore for proposer private key (future block signing)
|
||||
keystore_path: Path = Path("/var/lib/aitbc/keystore")
|
||||
keystore_password_file: Path = Path("/var/lib/aitbc/keystore/.password")
|
||||
|
||||
@@ -75,9 +75,9 @@ class P2PDiscovery:
|
||||
"""Add bootstrap node for initial connection"""
|
||||
self.bootstrap_nodes.append((address, port))
|
||||
|
||||
def generate_node_id(self, address: str, port: int, public_key: str) -> str:
|
||||
"""Generate unique node ID from address, port, and public key"""
|
||||
content = f"{address}:{port}:{public_key}"
|
||||
def generate_node_id(self, hostname: str, address: str, port: int, public_key: str) -> str:
|
||||
"""Generate unique node ID from hostname, address, port, and public key"""
|
||||
content = f"{hostname}:{address}:{port}:{public_key}"
|
||||
return hashlib.sha256(content.encode()).hexdigest()
|
||||
|
||||
async def start_discovery(self):
|
||||
|
||||
@@ -6,7 +6,8 @@ DNS-based hub discovery for federated mesh with hardcoded fallback
|
||||
import asyncio
|
||||
import logging
|
||||
import socket
|
||||
from typing import List, Optional, Tuple
|
||||
import json
|
||||
from typing import List, Optional, Tuple, Dict
|
||||
from dataclasses import dataclass
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -102,16 +103,73 @@ class HubDiscovery:
|
||||
for address, port in self.FALLBACK_HUBS
|
||||
]
|
||||
|
||||
async def register_hub(self, hub_address: str, hub_port: int, discovery_url: Optional[str] = None) -> bool:
|
||||
async def register_hub(self, hub_info: Dict, discovery_url: Optional[str] = None) -> bool:
|
||||
"""
|
||||
Register this node as a hub (placeholder for future DNS registration)
|
||||
|
||||
Note: This is a placeholder for future DNS registration functionality.
|
||||
Currently, hub registration is done via manual DNS configuration.
|
||||
Register this node as a hub with DNS discovery service
|
||||
|
||||
Args:
|
||||
hub_info: Dictionary containing hub information (node_id, address, port, island_id, island_name, public_address, public_port, public_key_pem)
|
||||
discovery_url: Optional custom discovery URL (uses default if not provided)
|
||||
|
||||
Returns:
|
||||
bool: True if registration successful, False otherwise
|
||||
"""
|
||||
logger.info(f"Hub registration placeholder: {hub_address}:{hub_port}")
|
||||
# Future: Implement dynamic DNS registration
|
||||
return True
|
||||
url = discovery_url or self.discovery_url
|
||||
registration_url = f"https://{url}/api/register"
|
||||
|
||||
try:
|
||||
import httpx
|
||||
|
||||
async with httpx.AsyncClient(timeout=10.0) as client:
|
||||
response = await client.post(registration_url, json=hub_info)
|
||||
|
||||
if response.status_code == 200:
|
||||
logger.info(f"Successfully registered hub {hub_info.get('node_id')} with DNS discovery service")
|
||||
return True
|
||||
else:
|
||||
logger.error(f"DNS registration failed: {response.status_code} - {response.text}")
|
||||
return False
|
||||
|
||||
except httpx.RequestError as e:
|
||||
logger.error(f"DNS registration request failed: {e}")
|
||||
return False
|
||||
except Exception as e:
|
||||
logger.error(f"DNS registration error: {e}")
|
||||
return False
|
||||
|
||||
async def unregister_hub(self, node_id: str, discovery_url: Optional[str] = None) -> bool:
|
||||
"""
|
||||
Unregister this node as a hub from DNS discovery service
|
||||
|
||||
Args:
|
||||
node_id: Node ID to unregister
|
||||
discovery_url: Optional custom discovery URL (uses default if not provided)
|
||||
|
||||
Returns:
|
||||
bool: True if unregistration successful, False otherwise
|
||||
"""
|
||||
url = discovery_url or self.discovery_url
|
||||
unregistration_url = f"https://{url}/api/unregister"
|
||||
|
||||
try:
|
||||
import httpx
|
||||
|
||||
async with httpx.AsyncClient(timeout=10.0) as client:
|
||||
response = await client.post(unregistration_url, json={"node_id": node_id})
|
||||
|
||||
if response.status_code == 200:
|
||||
logger.info(f"Successfully unregistered hub {node_id} from DNS discovery service")
|
||||
return True
|
||||
else:
|
||||
logger.error(f"DNS unregistration failed: {response.status_code} - {response.text}")
|
||||
return False
|
||||
|
||||
except httpx.RequestError as e:
|
||||
logger.error(f"DNS unregistration request failed: {e}")
|
||||
return False
|
||||
except Exception as e:
|
||||
logger.error(f"DNS unregistration error: {e}")
|
||||
return False
|
||||
|
||||
def clear_cache(self):
|
||||
"""Clear cached hub list"""
|
||||
|
||||
@@ -6,8 +6,10 @@ Manages hub operations, peer list sharing, and hub registration for federated me
|
||||
import asyncio
|
||||
import logging
|
||||
import time
|
||||
import json
|
||||
import os
|
||||
from typing import Dict, List, Optional, Set
|
||||
from dataclasses import dataclass, field
|
||||
from dataclasses import dataclass, field, asdict
|
||||
from enum import Enum
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -50,45 +52,319 @@ class PeerInfo:
|
||||
|
||||
class HubManager:
|
||||
"""Manages hub operations for federated mesh"""
|
||||
|
||||
def __init__(self, local_node_id: str, local_address: str, local_port: int, island_id: str, island_name: str):
|
||||
|
||||
def __init__(self, local_node_id: str, local_address: str, local_port: int, island_id: str, island_name: str, redis_url: Optional[str] = None):
|
||||
self.local_node_id = local_node_id
|
||||
self.local_address = local_address
|
||||
self.local_port = local_port
|
||||
self.island_id = island_id
|
||||
self.island_name = island_name
|
||||
|
||||
self.redis_url = redis_url or "redis://localhost:6379"
|
||||
|
||||
# Hub registration status
|
||||
self.is_hub = False
|
||||
self.hub_status = HubStatus.UNREGISTERED
|
||||
self.registered_at: Optional[float] = None
|
||||
|
||||
|
||||
# Known hubs
|
||||
self.known_hubs: Dict[str, HubInfo] = {} # node_id -> HubInfo
|
||||
|
||||
|
||||
# Peer registry (for providing peer lists)
|
||||
self.peer_registry: Dict[str, PeerInfo] = {} # node_id -> PeerInfo
|
||||
|
||||
|
||||
# Island peers (island_id -> set of node_ids)
|
||||
self.island_peers: Dict[str, Set[str]] = {}
|
||||
|
||||
|
||||
self.running = False
|
||||
|
||||
self._redis = None
|
||||
|
||||
# Initialize island peers for our island
|
||||
self.island_peers[self.island_id] = set()
|
||||
|
||||
def register_as_hub(self, public_address: Optional[str] = None, public_port: Optional[int] = None) -> bool:
|
||||
async def _connect_redis(self):
|
||||
"""Connect to Redis"""
|
||||
try:
|
||||
import redis.asyncio as redis
|
||||
self._redis = redis.from_url(self.redis_url)
|
||||
await self._redis.ping()
|
||||
logger.info(f"Connected to Redis for hub persistence: {self.redis_url}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to connect to Redis: {e}")
|
||||
return False
|
||||
|
||||
async def _persist_hub_registration(self, hub_info: HubInfo) -> bool:
|
||||
"""Persist hub registration to Redis"""
|
||||
try:
|
||||
if not self._redis:
|
||||
await self._connect_redis()
|
||||
|
||||
if not self._redis:
|
||||
logger.warning("Redis not available, skipping persistence")
|
||||
return False
|
||||
|
||||
key = f"hub:{hub_info.node_id}"
|
||||
value = json.dumps(asdict(hub_info), default=str)
|
||||
await self._redis.setex(key, 3600, value) # TTL: 1 hour
|
||||
logger.info(f"Persisted hub registration to Redis: {key}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to persist hub registration: {e}")
|
||||
return False
|
||||
|
||||
async def _remove_hub_registration(self, node_id: str) -> bool:
|
||||
"""Remove hub registration from Redis"""
|
||||
try:
|
||||
if not self._redis:
|
||||
await self._connect_redis()
|
||||
|
||||
if not self._redis:
|
||||
logger.warning("Redis not available, skipping removal")
|
||||
return False
|
||||
|
||||
key = f"hub:{node_id}"
|
||||
await self._redis.delete(key)
|
||||
logger.info(f"Removed hub registration from Redis: {key}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to remove hub registration: {e}")
|
||||
return False
|
||||
|
||||
async def _load_hub_registration(self) -> Optional[HubInfo]:
|
||||
"""Load hub registration from Redis"""
|
||||
try:
|
||||
if not self._redis:
|
||||
await self._connect_redis()
|
||||
|
||||
if not self._redis:
|
||||
return None
|
||||
|
||||
key = f"hub:{self.local_node_id}"
|
||||
value = await self._redis.get(key)
|
||||
if value:
|
||||
data = json.loads(value)
|
||||
return HubInfo(**data)
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to load hub registration: {e}")
|
||||
return None
|
||||
|
||||
def _get_blockchain_credentials(self) -> dict:
|
||||
"""Get blockchain credentials from keystore"""
|
||||
try:
|
||||
credentials = {}
|
||||
|
||||
# Get genesis block hash from genesis.json
|
||||
genesis_path = '/var/lib/aitbc/data/ait-mainnet/genesis.json'
|
||||
if os.path.exists(genesis_path):
|
||||
with open(genesis_path, 'r') as f:
|
||||
genesis_data = json.load(f)
|
||||
# Get genesis block hash
|
||||
if 'blocks' in genesis_data and len(genesis_data['blocks']) > 0:
|
||||
genesis_block = genesis_data['blocks'][0]
|
||||
credentials['genesis_block_hash'] = genesis_block.get('hash', '')
|
||||
credentials['genesis_block'] = genesis_data
|
||||
|
||||
# Get genesis address from keystore
|
||||
keystore_path = '/var/lib/aitbc/keystore/validator_keys.json'
|
||||
if os.path.exists(keystore_path):
|
||||
with open(keystore_path, 'r') as f:
|
||||
keys = json.load(f)
|
||||
# Get first key's address
|
||||
for key_id, key_data in keys.items():
|
||||
# Extract address from public key or use key_id
|
||||
credentials['genesis_address'] = key_id
|
||||
break
|
||||
|
||||
# Add chain info
|
||||
credentials['chain_id'] = self.island_chain_id or f"ait-{self.island_id[:8]}"
|
||||
credentials['island_id'] = self.island_id
|
||||
credentials['island_name'] = self.island_name
|
||||
|
||||
# Add RPC endpoint (local)
|
||||
credentials['rpc_endpoint'] = f"http://{self.local_address}:8006"
|
||||
credentials['p2p_port'] = self.local_port
|
||||
|
||||
return credentials
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get blockchain credentials: {e}")
|
||||
return {}
|
||||
|
||||
def __init__(self, local_node_id: str, local_address: str, local_port: int,
|
||||
island_id: str, island_name: str, redis_url: str):
|
||||
self.local_node_id = local_node_id
|
||||
self.local_address = local_address
|
||||
self.local_port = local_port
|
||||
self.island_id = island_id
|
||||
self.island_name = island_name
|
||||
self.island_chain_id = f"ait-{island_id[:8]}"
|
||||
|
||||
self.known_hubs: Dict[str, HubInfo] = {}
|
||||
self.peer_registry: Dict[str, PeerInfo] = {}
|
||||
self.peer_reputation: Dict[str, float] = {}
|
||||
self.peer_last_seen: Dict[str, float] = {}
|
||||
|
||||
# GPU marketplace tracking
|
||||
self.gpu_offers: Dict[str, dict] = {}
|
||||
self.gpu_bids: Dict[str, dict] = {}
|
||||
self.gpu_providers: Dict[str, dict] = {} # node_id -> gpu info
|
||||
|
||||
# Exchange tracking
|
||||
self.exchange_orders: Dict[str, dict] = {} # order_id -> order info
|
||||
self.exchange_order_books: Dict[str, Dict] = {} # pair -> {bids: [], asks: []}
|
||||
|
||||
# Redis client for persistence
|
||||
self.redis_url = redis_url
|
||||
self._redis_client = None
|
||||
|
||||
async def handle_join_request(self, join_request: dict) -> Optional[dict]:
|
||||
"""
|
||||
Handle island join request from a new node
|
||||
|
||||
Args:
|
||||
join_request: Dictionary containing join request data
|
||||
|
||||
Returns:
|
||||
dict: Join response with member list and credentials, or None if failed
|
||||
"""
|
||||
try:
|
||||
requested_island_id = join_request.get('island_id')
|
||||
|
||||
# Validate island ID
|
||||
if requested_island_id != self.island_id:
|
||||
logger.warning(f"Join request for island {requested_island_id} does not match our island {self.island_id}")
|
||||
return None
|
||||
|
||||
# Get all island members
|
||||
members = []
|
||||
for node_id, peer_info in self.peer_registry.items():
|
||||
if peer_info.island_id == self.island_id:
|
||||
members.append({
|
||||
'node_id': peer_info.node_id,
|
||||
'address': peer_info.address,
|
||||
'port': peer_info.port,
|
||||
'is_hub': peer_info.is_hub,
|
||||
'public_address': peer_info.public_address,
|
||||
'public_port': peer_info.public_port
|
||||
})
|
||||
|
||||
# Include self in member list
|
||||
members.append({
|
||||
'node_id': self.local_node_id,
|
||||
'address': self.local_address,
|
||||
'port': self.local_port,
|
||||
'is_hub': True,
|
||||
'public_address': self.known_hubs.get(self.local_node_id, {}).public_address if self.local_node_id in self.known_hubs else None,
|
||||
'public_port': self.known_hubs.get(self.local_node_id, {}).public_port if self.local_node_id in self.known_hubs else None
|
||||
})
|
||||
|
||||
# Get blockchain credentials
|
||||
credentials = self._get_blockchain_credentials()
|
||||
|
||||
# Build response
|
||||
response = {
|
||||
'type': 'join_response',
|
||||
'island_id': self.island_id,
|
||||
'island_name': self.island_name,
|
||||
'island_chain_id': self.island_chain_id or f"ait-{self.island_id[:8]}",
|
||||
'members': members,
|
||||
'credentials': credentials
|
||||
}
|
||||
|
||||
logger.info(f"Sent join_response to node {join_request.get('node_id')} with {len(members)} members")
|
||||
return response
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error handling join request: {e}")
|
||||
return None
|
||||
|
||||
def register_gpu_offer(self, offer_data: dict) -> bool:
|
||||
"""Register a GPU marketplace offer in the hub"""
|
||||
try:
|
||||
offer_id = offer_data.get('offer_id')
|
||||
if offer_id:
|
||||
self.gpu_offers[offer_id] = offer_data
|
||||
logger.info(f"Registered GPU offer: {offer_id}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Error registering GPU offer: {e}")
|
||||
return False
|
||||
|
||||
def register_gpu_bid(self, bid_data: dict) -> bool:
|
||||
"""Register a GPU marketplace bid in the hub"""
|
||||
try:
|
||||
bid_id = bid_data.get('bid_id')
|
||||
if bid_id:
|
||||
self.gpu_bids[bid_id] = bid_data
|
||||
logger.info(f"Registered GPU bid: {bid_id}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Error registering GPU bid: {e}")
|
||||
return False
|
||||
|
||||
def register_gpu_provider(self, node_id: str, gpu_info: dict) -> bool:
|
||||
"""Register a GPU provider in the hub"""
|
||||
try:
|
||||
self.gpu_providers[node_id] = gpu_info
|
||||
logger.info(f"Registered GPU provider: {node_id}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Error registering GPU provider: {e}")
|
||||
return False
|
||||
|
||||
def register_exchange_order(self, order_data: dict) -> bool:
|
||||
"""Register an exchange order in the hub"""
|
||||
try:
|
||||
order_id = order_data.get('order_id')
|
||||
if order_id:
|
||||
self.exchange_orders[order_id] = order_data
|
||||
|
||||
# Update order book
|
||||
pair = order_data.get('pair')
|
||||
side = order_data.get('side')
|
||||
if pair and side:
|
||||
if pair not in self.exchange_order_books:
|
||||
self.exchange_order_books[pair] = {'bids': [], 'asks': []}
|
||||
|
||||
if side == 'buy':
|
||||
self.exchange_order_books[pair]['bids'].append(order_data)
|
||||
elif side == 'sell':
|
||||
self.exchange_order_books[pair]['asks'].append(order_data)
|
||||
|
||||
logger.info(f"Registered exchange order: {order_id}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Error registering exchange order: {e}")
|
||||
return False
|
||||
|
||||
def get_gpu_offers(self) -> list:
|
||||
"""Get all GPU offers"""
|
||||
return list(self.gpu_offers.values())
|
||||
|
||||
def get_gpu_bids(self) -> list:
|
||||
"""Get all GPU bids"""
|
||||
return list(self.gpu_bids.values())
|
||||
|
||||
def get_gpu_providers(self) -> list:
|
||||
"""Get all GPU providers"""
|
||||
return list(self.gpu_providers.values())
|
||||
|
||||
def get_exchange_order_book(self, pair: str) -> dict:
|
||||
"""Get order book for a specific trading pair"""
|
||||
return self.exchange_order_books.get(pair, {'bids': [], 'asks': []})
|
||||
|
||||
async def register_as_hub(self, public_address: Optional[str] = None, public_port: Optional[int] = None) -> bool:
|
||||
"""Register this node as a hub"""
|
||||
if self.is_hub:
|
||||
logger.warning("Already registered as hub")
|
||||
return False
|
||||
|
||||
|
||||
self.is_hub = True
|
||||
self.hub_status = HubStatus.REGISTERED
|
||||
self.registered_at = time.time()
|
||||
|
||||
|
||||
# Add self to known hubs
|
||||
self.known_hubs[self.local_node_id] = HubInfo(
|
||||
hub_info = HubInfo(
|
||||
node_id=self.local_node_id,
|
||||
address=self.local_address,
|
||||
port=self.local_port,
|
||||
@@ -99,24 +375,31 @@ class HubManager:
|
||||
registered_at=time.time(),
|
||||
last_seen=time.time()
|
||||
)
|
||||
|
||||
self.known_hubs[self.local_node_id] = hub_info
|
||||
|
||||
# Persist to Redis
|
||||
await self._persist_hub_registration(hub_info)
|
||||
|
||||
logger.info(f"Registered as hub for island {self.island_id}")
|
||||
return True
|
||||
|
||||
def unregister_as_hub(self) -> bool:
|
||||
async def unregister_as_hub(self) -> bool:
|
||||
"""Unregister this node as a hub"""
|
||||
if not self.is_hub:
|
||||
logger.warning("Not registered as hub")
|
||||
return False
|
||||
|
||||
|
||||
self.is_hub = False
|
||||
self.hub_status = HubStatus.UNREGISTERED
|
||||
self.registered_at = None
|
||||
|
||||
|
||||
# Remove from Redis
|
||||
await self._remove_hub_registration(self.local_node_id)
|
||||
|
||||
# Remove self from known hubs
|
||||
if self.local_node_id in self.known_hubs:
|
||||
del self.known_hubs[self.local_node_id]
|
||||
|
||||
|
||||
logger.info(f"Unregistered as hub for island {self.island_id}")
|
||||
return True
|
||||
|
||||
|
||||
@@ -88,10 +88,11 @@ class P2PNetworkService:
|
||||
self.host,
|
||||
self.port,
|
||||
self.island_id,
|
||||
self.island_name
|
||||
self.island_name,
|
||||
self.config.redis_url
|
||||
)
|
||||
self.hub_manager.register_as_hub(self.public_endpoint[0] if self.public_endpoint else None,
|
||||
self.public_endpoint[1] if self.public_endpoint else None)
|
||||
await self.hub_manager.register_as_hub(self.public_endpoint[0] if self.public_endpoint else None,
|
||||
self.public_endpoint[1] if self.public_endpoint else None)
|
||||
logger.info("Initialized hub manager")
|
||||
|
||||
# Discover public endpoint via STUN if configured
|
||||
@@ -423,6 +424,40 @@ class P2PNetworkService:
|
||||
|
||||
elif msg_type == 'handshake':
|
||||
pass # Ignore subsequent handshakes
|
||||
elif msg_type == 'join_request':
|
||||
# Handle island join request (only if we're a hub)
|
||||
if self.hub_manager:
|
||||
logger.info(f"Received join_request from {peer_id}")
|
||||
response = await self.hub_manager.handle_join_request(message)
|
||||
if response:
|
||||
await self._send_message(writer, response)
|
||||
else:
|
||||
logger.warning(f"Received join_request but not a hub, ignoring")
|
||||
elif msg_type == 'join_response':
|
||||
# Handle island join response (only if we requested to join)
|
||||
logger.info(f"Received join_response from {peer_id}")
|
||||
# Store the response for the CLI to retrieve
|
||||
if not hasattr(self, '_join_response'):
|
||||
self._join_response = {}
|
||||
self._join_response[peer_id] = message
|
||||
elif msg_type == 'gpu_provider_query':
|
||||
# Handle GPU provider query
|
||||
logger.info(f"Received gpu_provider_query from {peer_id}")
|
||||
# Respond with GPU availability
|
||||
gpu_response = {
|
||||
'type': 'gpu_provider_response',
|
||||
'node_id': self.node_id,
|
||||
'gpu_available': self._get_gpu_count(),
|
||||
'gpu_specs': self._get_gpu_specs()
|
||||
}
|
||||
await self._send_message(writer, gpu_response)
|
||||
elif msg_type == 'gpu_provider_response':
|
||||
# Handle GPU provider response
|
||||
logger.info(f"Received gpu_provider_response from {peer_id}")
|
||||
# Store the response for the CLI to retrieve
|
||||
if not hasattr(self, '_gpu_provider_responses'):
|
||||
self._gpu_provider_responses = {}
|
||||
self._gpu_provider_responses[peer_id] = message
|
||||
elif msg_type == 'new_transaction':
|
||||
tx_data = message.get('tx')
|
||||
if tx_data:
|
||||
@@ -470,28 +505,101 @@ class P2PNetworkService:
|
||||
writer.close()
|
||||
try:
|
||||
await writer.wait_closed()
|
||||
except Exception:
|
||||
except:
|
||||
pass
|
||||
|
||||
async def _send_message(self, writer: asyncio.StreamWriter, message: dict):
|
||||
"""Helper to send a JSON message over a stream"""
|
||||
def _get_gpu_count(self) -> int:
|
||||
"""Get the number of available GPUs on this node"""
|
||||
try:
|
||||
data = json.dumps(message) + '\n'
|
||||
writer.write(data.encode())
|
||||
await writer.drain()
|
||||
# Try to read GPU count from system
|
||||
# This is a placeholder - in a real implementation, this would
|
||||
# query the actual GPU hardware or a configuration file
|
||||
import os
|
||||
gpu_config_path = '/var/lib/aitbc/gpu_config.json'
|
||||
if os.path.exists(gpu_config_path):
|
||||
with open(gpu_config_path, 'r') as f:
|
||||
config = json.load(f)
|
||||
return config.get('gpu_count', 0)
|
||||
return 0
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to send message: {e}")
|
||||
logger.error(f"Error getting GPU count: {e}")
|
||||
return 0
|
||||
|
||||
async def _ping_peers_loop(self):
|
||||
"""Periodically broadcast pings to all active connections to keep them alive"""
|
||||
while not self._stop_event.is_set():
|
||||
await asyncio.sleep(20)
|
||||
ping_msg = {'type': 'ping', 'node_id': self.node_id}
|
||||
|
||||
# Make a copy of writers to avoid dictionary changed during iteration error
|
||||
writers = list(self.active_connections.values())
|
||||
for writer in writers:
|
||||
await self._send_message(writer, ping_msg)
|
||||
def _get_gpu_specs(self) -> dict:
|
||||
"""Get GPU specifications for this node"""
|
||||
try:
|
||||
# Try to read GPU specs from system
|
||||
# This is a placeholder - in a real implementation, this would
|
||||
# query the actual GPU hardware or a configuration file
|
||||
import os
|
||||
gpu_config_path = '/var/lib/aitbc/gpu_config.json'
|
||||
if os.path.exists(gpu_config_path):
|
||||
with open(gpu_config_path, 'r') as f:
|
||||
config = json.load(f)
|
||||
return config.get('specs', {})
|
||||
return {}
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting GPU specs: {e}")
|
||||
return {}
|
||||
|
||||
async def send_join_request(self, hub_address: str, hub_port: int, island_id: str, island_name: str, node_id: str, public_key_pem: str) -> Optional[dict]:
|
||||
"""
|
||||
Send join request to a hub and wait for response
|
||||
|
||||
Args:
|
||||
hub_address: Hub IP address or hostname
|
||||
hub_port: Hub port
|
||||
island_id: Island ID to join
|
||||
island_name: Island name
|
||||
node_id: Local node ID
|
||||
public_key_pem: Public key PEM
|
||||
|
||||
Returns:
|
||||
dict: Join response from hub, or None if failed
|
||||
"""
|
||||
try:
|
||||
# Connect to hub
|
||||
reader, writer = await asyncio.open_connection(hub_address, hub_port)
|
||||
logger.info(f"Connected to hub {hub_address}:{hub_port}")
|
||||
|
||||
# Send join request
|
||||
join_request = {
|
||||
'type': 'join_request',
|
||||
'node_id': node_id,
|
||||
'island_id': island_id,
|
||||
'island_name': island_name,
|
||||
'public_key_pem': public_key_pem
|
||||
}
|
||||
await self._send_message(writer, join_request)
|
||||
logger.info(f"Sent join_request to hub")
|
||||
|
||||
# Wait for join response (with timeout)
|
||||
try:
|
||||
data = await asyncio.wait_for(reader.readline(), timeout=30.0)
|
||||
if data:
|
||||
response = json.loads(data.decode().strip())
|
||||
if response.get('type') == 'join_response':
|
||||
logger.info(f"Received join_response from hub")
|
||||
writer.close()
|
||||
await writer.wait_closed()
|
||||
return response
|
||||
else:
|
||||
logger.warning(f"Unexpected response type: {response.get('type')}")
|
||||
else:
|
||||
logger.warning("No response from hub")
|
||||
except asyncio.TimeoutError:
|
||||
logger.warning("Timeout waiting for join response")
|
||||
|
||||
writer.close()
|
||||
await writer.wait_closed()
|
||||
return None
|
||||
|
||||
except ConnectionRefusedError:
|
||||
logger.error(f"Hub {hub_address}:{hub_port} refused connection")
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to send join request: {e}")
|
||||
return None
|
||||
|
||||
|
||||
async def run_p2p_service(host: str, port: int, node_id: str, peers: str):
|
||||
|
||||
@@ -60,7 +60,7 @@ def _serialize_receipt(receipt: Receipt) -> Dict[str, Any]:
|
||||
|
||||
|
||||
class TransactionRequest(BaseModel):
|
||||
type: str = Field(description="Transaction type, e.g. TRANSFER or RECEIPT_CLAIM")
|
||||
type: str = Field(description="Transaction type, e.g. TRANSFER, RECEIPT_CLAIM, GPU_MARKETPLACE, EXCHANGE")
|
||||
sender: str
|
||||
nonce: int
|
||||
fee: int = Field(ge=0)
|
||||
@@ -70,8 +70,9 @@ class TransactionRequest(BaseModel):
|
||||
@model_validator(mode="after")
|
||||
def normalize_type(self) -> "TransactionRequest": # type: ignore[override]
|
||||
normalized = self.type.upper()
|
||||
if normalized not in {"TRANSFER", "RECEIPT_CLAIM"}:
|
||||
raise ValueError(f"unsupported transaction type: {self.type}")
|
||||
valid_types = {"TRANSFER", "RECEIPT_CLAIM", "GPU_MARKETPLACE", "EXCHANGE"}
|
||||
if normalized not in valid_types:
|
||||
raise ValueError(f"unsupported transaction type: {normalized}. Valid types: {valid_types}")
|
||||
self.type = normalized
|
||||
return self
|
||||
|
||||
@@ -201,31 +202,83 @@ async def get_mempool(chain_id: str = None, limit: int = 100) -> Dict[str, Any]:
|
||||
|
||||
|
||||
@router.get("/accounts/{address}", summary="Get account information")
|
||||
async def get_account(address: str) -> Dict[str, Any]:
|
||||
"""Get account information including balance"""
|
||||
from ..models import Account
|
||||
async def get_account(address: str, chain_id: str = None) -> Dict[str, Any]:
|
||||
"""Get account information"""
|
||||
chain_id = get_chain_id(chain_id)
|
||||
|
||||
try:
|
||||
with session_scope() as session:
|
||||
account = session.exec(select(Account).where(Account.address == address)).first()
|
||||
with session_scope() as session:
|
||||
account = session.exec(select(Account).where(Account.address == address).where(Account.chain_id == chain_id)).first()
|
||||
if not account:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Account not found")
|
||||
|
||||
return {
|
||||
"address": account.address,
|
||||
"balance": account.balance,
|
||||
"nonce": account.nonce,
|
||||
"chain_id": account.chain_id
|
||||
}
|
||||
|
||||
|
||||
@router.get("/transactions", summary="Query transactions")
|
||||
async def query_transactions(
|
||||
transaction_type: Optional[str] = None,
|
||||
island_id: Optional[str] = None,
|
||||
pair: Optional[str] = None,
|
||||
status: Optional[str] = None,
|
||||
order_id: Optional[str] = None,
|
||||
limit: Optional[int] = 100,
|
||||
chain_id: str = None
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Query transactions with optional filters"""
|
||||
chain_id = get_chain_id(chain_id)
|
||||
|
||||
with session_scope() as session:
|
||||
query = select(Transaction).where(Transaction.chain_id == chain_id)
|
||||
|
||||
# Apply filters based on payload fields
|
||||
transactions = session.exec(query).all()
|
||||
|
||||
results = []
|
||||
for tx in transactions:
|
||||
# Filter by transaction type in payload
|
||||
if transaction_type and tx.payload.get('type') != transaction_type:
|
||||
continue
|
||||
|
||||
if account is None:
|
||||
return {
|
||||
"address": address,
|
||||
"balance": 0,
|
||||
"nonce": 0,
|
||||
"exists": False
|
||||
}
|
||||
# Filter by island_id in payload
|
||||
if island_id and tx.payload.get('island_id') != island_id:
|
||||
continue
|
||||
|
||||
return {
|
||||
"address": account.address,
|
||||
"balance": account.balance,
|
||||
"nonce": account.nonce,
|
||||
"exists": True
|
||||
}
|
||||
except Exception as e:
|
||||
_logger.error("Failed to get account", extra={"error": str(e), "address": address})
|
||||
raise HTTPException(status_code=500, detail=f"Failed to get account: {str(e)}")
|
||||
# Filter by pair in payload
|
||||
if pair and tx.payload.get('pair') != pair:
|
||||
continue
|
||||
|
||||
# Filter by status in payload
|
||||
if status and tx.payload.get('status') != status:
|
||||
continue
|
||||
|
||||
# Filter by order_id in payload
|
||||
if order_id and tx.payload.get('order_id') != order_id and tx.payload.get('offer_id') != order_id and tx.payload.get('bid_id') != order_id:
|
||||
continue
|
||||
|
||||
results.append({
|
||||
"transaction_id": tx.id,
|
||||
"tx_hash": tx.tx_hash,
|
||||
"sender": tx.sender,
|
||||
"recipient": tx.recipient,
|
||||
"payload": tx.payload,
|
||||
"status": tx.status,
|
||||
"created_at": tx.created_at.isoformat(),
|
||||
"timestamp": tx.timestamp,
|
||||
"nonce": tx.nonce,
|
||||
"value": tx.value,
|
||||
"fee": tx.fee
|
||||
})
|
||||
|
||||
# Apply limit
|
||||
if limit:
|
||||
results = results[:limit]
|
||||
|
||||
return results
|
||||
|
||||
|
||||
@router.get("/blocks-range", summary="Get blocks in height range")
|
||||
|
||||
Reference in New Issue
Block a user