Expand aitbc package with new utility modules and enhanced HTTP client
- Add new exception types: RetryError, CircuitBreakerOpenError, RateLimitError
- Enhance AITBCHTTPClient with retry logic, caching, circuit breaker, and rate limiting
- Add AsyncAITBCHTTPClient for async HTTP operations
- Add crypto module with Ethereum key derivation, signing, encryption, and hashing utilities
- Add web3_utils module with Web3Client and create_web3_client
- Add security module with token generation and API key management
This commit is contained in:
@@ -1048,19 +1048,15 @@ async def search_transactions(
|
||||
response = await client.get(f"{rpc_url}/rpc/search/transactions", params=params)
|
||||
if response.status_code == 200:
|
||||
return response.json()
|
||||
elif response.status_code == 404:
|
||||
return []
|
||||
else:
|
||||
# Return mock data for demonstration
|
||||
return [
|
||||
{
|
||||
"hash": "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef",
|
||||
"type": tx_type or "transfer",
|
||||
"from": "0xabcdef1234567890abcdef1234567890abcdef1234",
|
||||
"to": "0x1234567890abcdef1234567890abcdef12345678",
|
||||
"amount": "1.5",
|
||||
"fee": "0.001",
|
||||
"timestamp": datetime.now().isoformat()
|
||||
}
|
||||
]
|
||||
raise HTTPException(
|
||||
status_code=response.status_code,
|
||||
detail=f"Failed to fetch transactions from blockchain RPC: {response.text}"
|
||||
)
|
||||
except httpx.RequestError as e:
|
||||
raise HTTPException(status_code=503, detail=f"Blockchain RPC unavailable: {str(e)}")
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Search failed: {str(e)}")
|
||||
|
||||
@@ -1095,17 +1091,15 @@ async def search_blocks(
|
||||
response = await client.get(f"{rpc_url}/rpc/search/blocks", params=params)
|
||||
if response.status_code == 200:
|
||||
return response.json()
|
||||
elif response.status_code == 404:
|
||||
return []
|
||||
else:
|
||||
# Return mock data for demonstration
|
||||
return [
|
||||
{
|
||||
"height": 12345,
|
||||
"hash": "0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890",
|
||||
"validator": validator or "0x1234567890abcdef1234567890abcdef12345678",
|
||||
"tx_count": min_tx or 5,
|
||||
"timestamp": datetime.now().isoformat()
|
||||
}
|
||||
]
|
||||
raise HTTPException(
|
||||
status_code=response.status_code,
|
||||
detail=f"Failed to fetch blocks from blockchain RPC: {response.text}"
|
||||
)
|
||||
except httpx.RequestError as e:
|
||||
raise HTTPException(status_code=503, detail=f"Blockchain RPC unavailable: {str(e)}")
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Search failed: {str(e)}")
|
||||
|
||||
|
||||
@@ -0,0 +1,564 @@
|
||||
"""
|
||||
Hub Manager
|
||||
Manages hub operations, peer list sharing, and hub registration for federated mesh
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import time
|
||||
import json
|
||||
import os
|
||||
import socket
|
||||
from typing import Dict, List, Optional, Set
|
||||
from dataclasses import dataclass, field, asdict
|
||||
from enum import Enum
|
||||
from ..config import settings
|
||||
|
||||
from aitbc import get_logger, DATA_DIR, KEYSTORE_DIR
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class HubStatus(Enum):
    """Registration states a hub node can be in."""

    REGISTERED = "registered"
    UNREGISTERED = "unregistered"
    PENDING = "pending"
|
||||
|
||||
|
||||
@dataclass
class HubInfo:
    """Descriptor for a hub node in the federated mesh."""

    node_id: str
    address: str
    port: int
    island_id: str
    island_name: str
    # Externally reachable endpoint, if the hub is behind NAT.
    public_address: Optional[str] = None
    public_port: Optional[int] = None
    # Unix timestamps; 0 means "never observed".
    registered_at: float = 0
    last_seen: float = 0
    # Number of peers currently tracked for this hub's island.
    peer_count: int = 0
|
||||
|
||||
|
||||
@dataclass
class PeerInfo:
    """Descriptor for a peer tracked by a hub's registry."""

    node_id: str
    address: str
    port: int
    island_id: str
    is_hub: bool
    # Externally reachable endpoint, if known (NAT traversal).
    public_address: Optional[str] = None
    public_port: Optional[int] = None
    # Unix timestamp of the last heartbeat; 0 means "never observed".
    last_seen: float = 0
|
||||
|
||||
|
||||
class HubManager:
    """Manages hub operations for the federated mesh.

    Tracks hub registration state, known hubs, per-island peer registries,
    and — when acting as a hub — GPU-marketplace and exchange order state
    shared with island members. Hub registration is persisted to Redis on
    a best-effort basis.
    """

    def __init__(self, local_node_id: str, local_address: str, local_port: int, island_id: str, island_name: str, redis_url: Optional[str] = None):
        self.local_node_id = local_node_id
        self.local_address = local_address
        self.local_port = local_port
        self.island_id = island_id
        self.island_name = island_name
        # Prefer explicit island chain id, then global chain id, then a
        # stable fallback derived from the island id.
        self.island_chain_id = settings.island_chain_id or settings.chain_id or f"ait-{island_id[:8]}"
        self.redis_url = redis_url or "redis://localhost:6379"

        # Hub registration status
        self.is_hub = False
        self.hub_status = HubStatus.UNREGISTERED
        self.registered_at: Optional[float] = None

        # Known hubs: node_id -> HubInfo
        self.known_hubs: Dict[str, HubInfo] = {}

        # Peer registry (for providing peer lists): node_id -> PeerInfo
        self.peer_registry: Dict[str, PeerInfo] = {}

        # Island peers: island_id -> set of node_ids
        self.island_peers: Dict[str, Set[str]] = {}

        # Marketplace / exchange state served by the hub.
        # BUGFIX: these attributes are used by register_gpu_offer(),
        # register_gpu_bid(), register_gpu_provider(), register_exchange_order()
        # and the corresponding getters, but were never initialized, so the
        # first access raised AttributeError.
        self.gpu_offers: Dict[str, dict] = {}        # offer_id -> offer payload
        self.gpu_bids: Dict[str, dict] = {}          # bid_id -> bid payload
        self.gpu_providers: Dict[str, dict] = {}     # node_id -> gpu info
        self.exchange_orders: Dict[str, dict] = {}   # order_id -> order payload
        self.exchange_order_books: Dict[str, dict] = {}  # pair -> {'bids': [...], 'asks': [...]}

        self.running = False
        self._redis = None

        # Initialize island peers for our island
        self.island_peers[self.island_id] = set()
|
||||
|
||||
async def _connect_redis(self):
    """Open a Redis connection for hub persistence and verify it with a ping.

    Returns:
        bool: True when the connection is established, False when Redis is
        unreachable or the client library is unavailable.
    """
    try:
        import redis.asyncio as redis

        self._redis = redis.from_url(self.redis_url)
        await self._redis.ping()
    except Exception as e:
        logger.error(f"Failed to connect to Redis: {e}")
        return False
    logger.info(f"Connected to Redis for hub persistence: {self.redis_url}")
    return True
|
||||
|
||||
async def _ensure_redis(self) -> bool:
    """Lazily establish the Redis connection; True when a client is available."""
    if not self._redis:
        await self._connect_redis()
    return self._redis is not None

async def _persist_hub_registration(self, hub_info: HubInfo) -> bool:
    """Persist a hub registration record to Redis (TTL: 1 hour).

    Returns False (without raising) when Redis is unavailable or the
    write fails.
    """
    try:
        if not await self._ensure_redis():
            logger.warning("Redis not available, skipping persistence")
            return False

        key = f"hub:{hub_info.node_id}"
        value = json.dumps(asdict(hub_info), default=str)
        await self._redis.setex(key, 3600, value)  # TTL: 1 hour
        logger.info(f"Persisted hub registration to Redis: {key}")
        return True
    except Exception as e:
        logger.error(f"Failed to persist hub registration: {e}")
        return False

async def _remove_hub_registration(self, node_id: str) -> bool:
    """Delete a hub registration record from Redis.

    Returns False (without raising) when Redis is unavailable or the
    delete fails.
    """
    try:
        if not await self._ensure_redis():
            logger.warning("Redis not available, skipping removal")
            return False

        key = f"hub:{node_id}"
        await self._redis.delete(key)
        logger.info(f"Removed hub registration from Redis: {key}")
        return True
    except Exception as e:
        logger.error(f"Failed to remove hub registration: {e}")
        return False

async def _load_hub_registration(self) -> Optional[HubInfo]:
    """Load this node's persisted hub registration from Redis.

    Returns:
        HubInfo when a record exists, otherwise None (also on any error).
    """
    try:
        if not await self._ensure_redis():
            return None

        value = await self._redis.get(f"hub:{self.local_node_id}")
        if not value:
            return None
        return HubInfo(**json.loads(value))
    except Exception as e:
        logger.error(f"Failed to load hub registration: {e}")
        return None
|
||||
|
||||
def _get_blockchain_credentials(self) -> dict:
    """Assemble blockchain credentials for a join response.

    Collects, best-effort: the genesis block (hash + full document) from
    the first existing genesis.json candidate, the genesis address (first
    key id in the validator keystore), chain/island identifiers, and the
    local RPC/P2P endpoints. Returns {} on any unexpected error.
    """
    try:
        credentials = {}

        # Genesis block: take the first candidate path that both exists
        # and actually contains blocks.
        candidates = (
            str(settings.db_path.parent / 'genesis.json'),
            f"{DATA_DIR}/data/{settings.chain_id}/genesis.json",
            f'{DATA_DIR}/data/ait-mainnet/genesis.json',
        )
        for candidate in candidates:
            if not os.path.exists(candidate):
                continue
            with open(candidate, 'r') as f:
                genesis_data = json.load(f)
            if 'blocks' in genesis_data and len(genesis_data['blocks']) > 0:
                credentials['genesis_block_hash'] = genesis_data['blocks'][0].get('hash', '')
                credentials['genesis_block'] = genesis_data
                break

        # Genesis address: the first key id in the validator keystore.
        keystore_path = str(KEYSTORE_DIR / 'validator_keys.json')
        if os.path.exists(keystore_path):
            with open(keystore_path, 'r') as f:
                keys = json.load(f)
            if keys:
                credentials['genesis_address'] = next(iter(keys))

        # Chain / island identity.
        credentials['chain_id'] = self.island_chain_id
        credentials['island_id'] = self.island_id
        credentials['island_name'] = self.island_name

        # Local RPC endpoint; substitute a routable host when bound to a
        # loopback/wildcard address.
        rpc_host = self.local_address
        if rpc_host in {"0.0.0.0", "127.0.0.1", "localhost", ""}:
            rpc_host = settings.hub_discovery_url or socket.gethostname()
        credentials['rpc_endpoint'] = f"http://{rpc_host}:8006"
        credentials['p2p_port'] = self.local_port

        return credentials
    except Exception as e:
        logger.error(f"Failed to get blockchain credentials: {e}")
        return {}
|
||||
|
||||
async def handle_join_request(self, join_request: dict) -> Optional[dict]:
    """
    Handle island join request from a new node.

    Args:
        join_request: Dictionary containing join request data

    Returns:
        dict: Join response with member list and credentials, or None if
        the request targets a different island or an error occurs.
    """
    try:
        requested_island_id = join_request.get('island_id')

        # Only serve requests for our own island.
        if requested_island_id != self.island_id:
            logger.warning(f"Join request for island {requested_island_id} does not match our island {self.island_id}")
            return None

        # All currently registered members of our island.
        members = [
            {
                'node_id': p.node_id,
                'address': p.address,
                'port': p.port,
                'is_hub': p.is_hub,
                'public_address': p.public_address,
                'public_port': p.public_port,
            }
            for p in self.peer_registry.values()
            if p.island_id == self.island_id
        ]

        # Include ourselves (the responding hub) in the member list.
        self_hub = self.known_hubs.get(self.local_node_id)
        members.append({
            'node_id': self.local_node_id,
            'address': self.local_address,
            'port': self.local_port,
            'is_hub': True,
            'public_address': self_hub.public_address if self_hub else None,
            'public_port': self_hub.public_port if self_hub else None,
        })

        credentials = self._get_blockchain_credentials()

        response = {
            'type': 'join_response',
            'island_id': self.island_id,
            'island_name': self.island_name,
            'island_chain_id': self.island_chain_id or f"ait-{self.island_id[:8]}",
            'members': members,
            'credentials': credentials,
        }

        logger.info(f"Sent join_response to node {join_request.get('node_id')} with {len(members)} members")
        return response

    except Exception as e:
        logger.error(f"Error handling join request: {e}")
        return None
|
||||
|
||||
def register_gpu_offer(self, offer_data: dict) -> bool:
    """Register a GPU marketplace offer in the hub.

    Args:
        offer_data: Offer payload; must carry an 'offer_id' key.

    Returns:
        True when stored; False when the id is missing or on error.
    """
    try:
        offer_id = offer_data.get('offer_id')
        if not offer_id:
            # BUGFIX: previously fell through and returned None implicitly,
            # violating the declared bool return type.
            return False
        self.gpu_offers[offer_id] = offer_data
        logger.info(f"Registered GPU offer: {offer_id}")
        return True
    except Exception as e:
        logger.error(f"Error registering GPU offer: {e}")
        return False

def register_gpu_bid(self, bid_data: dict) -> bool:
    """Register a GPU marketplace bid in the hub.

    Args:
        bid_data: Bid payload; must carry a 'bid_id' key.

    Returns:
        True when stored; False when the id is missing or on error.
    """
    try:
        bid_id = bid_data.get('bid_id')
        if not bid_id:
            # BUGFIX: previously fell through and returned None implicitly.
            return False
        self.gpu_bids[bid_id] = bid_data
        logger.info(f"Registered GPU bid: {bid_id}")
        return True
    except Exception as e:
        logger.error(f"Error registering GPU bid: {e}")
        return False

def register_gpu_provider(self, node_id: str, gpu_info: dict) -> bool:
    """Register a GPU provider in the hub.

    Args:
        node_id: Identifier of the providing node.
        gpu_info: Provider hardware/capability payload.

    Returns:
        True when stored; False on error.
    """
    try:
        self.gpu_providers[node_id] = gpu_info
        logger.info(f"Registered GPU provider: {node_id}")
        return True
    except Exception as e:
        logger.error(f"Error registering GPU provider: {e}")
        return False

def register_exchange_order(self, order_data: dict) -> bool:
    """Register an exchange order in the hub and index it in the order book.

    Args:
        order_data: Order payload; must carry 'order_id', and optionally
            'pair' and 'side' ('buy' or 'sell') for order-book indexing.

    Returns:
        True when stored; False when the id is missing or on error.
    """
    try:
        order_id = order_data.get('order_id')
        if not order_id:
            # BUGFIX: previously fell through and returned None implicitly.
            return False
        self.exchange_orders[order_id] = order_data

        # Index the order in the per-pair book; orders without pair/side
        # are stored but not indexed.
        pair = order_data.get('pair')
        side = order_data.get('side')
        if pair and side:
            book = self.exchange_order_books.setdefault(pair, {'bids': [], 'asks': []})
            if side == 'buy':
                book['bids'].append(order_data)
            elif side == 'sell':
                book['asks'].append(order_data)

        logger.info(f"Registered exchange order: {order_id}")
        return True
    except Exception as e:
        logger.error(f"Error registering exchange order: {e}")
        return False
|
||||
|
||||
def get_gpu_offers(self) -> list:
    """Return every registered GPU offer payload."""
    return [*self.gpu_offers.values()]

def get_gpu_bids(self) -> list:
    """Return every registered GPU bid payload."""
    return [*self.gpu_bids.values()]

def get_gpu_providers(self) -> list:
    """Return every registered GPU provider record."""
    return [*self.gpu_providers.values()]

def get_exchange_order_book(self, pair: str) -> dict:
    """Return the order book for *pair*, or an empty book when unknown."""
    empty_book = {'bids': [], 'asks': []}
    return self.exchange_order_books.get(pair, empty_book)
|
||||
|
||||
async def register_as_hub(self, public_address: Optional[str] = None, public_port: Optional[int] = None) -> bool:
    """Register this node as a hub for its island.

    Records a HubInfo entry for ourselves, persists it to Redis
    (best-effort), and flips the registration state. No-op (returns
    False) when already registered.
    """
    if self.is_hub:
        logger.warning("Already registered as hub")
        return False

    now = time.time()
    self.is_hub = True
    self.hub_status = HubStatus.REGISTERED
    self.registered_at = now

    # Record ourselves among the known hubs.
    hub_info = HubInfo(
        node_id=self.local_node_id,
        address=self.local_address,
        port=self.local_port,
        island_id=self.island_id,
        island_name=self.island_name,
        public_address=public_address,
        public_port=public_port,
        registered_at=now,
        last_seen=now,
    )
    self.known_hubs[self.local_node_id] = hub_info

    # Persist to Redis (best-effort; failure is logged, not raised).
    await self._persist_hub_registration(hub_info)

    logger.info(f"Registered as hub for island {self.island_id}")
    return True

async def unregister_as_hub(self) -> bool:
    """Undo hub registration: clear state, drop the Redis record, and
    remove ourselves from the known-hub registry.

    No-op (returns False) when not currently registered.
    """
    if not self.is_hub:
        logger.warning("Not registered as hub")
        return False

    self.is_hub = False
    self.hub_status = HubStatus.UNREGISTERED
    self.registered_at = None

    # Remove from Redis (best-effort).
    await self._remove_hub_registration(self.local_node_id)

    # Drop our own entry from the known hubs.
    self.known_hubs.pop(self.local_node_id, None)

    logger.info(f"Unregistered as hub for island {self.island_id}")
    return True
|
||||
|
||||
def register_peer(self, peer_info: PeerInfo) -> bool:
    """Add (or refresh) a peer in the registry and its island's peer set.

    Also refreshes the peer_count on the matching known-hub entry when
    the peer is itself a hub. Always returns True.
    """
    self.peer_registry[peer_info.node_id] = peer_info

    # Track membership per island.
    self.island_peers.setdefault(peer_info.island_id, set()).add(peer_info.node_id)

    # Keep the hub's advertised peer count in sync.
    if peer_info.is_hub and peer_info.node_id in self.known_hubs:
        self.known_hubs[peer_info.node_id].peer_count = len(self.island_peers.get(peer_info.island_id, set()))

    logger.debug(f"Registered peer {peer_info.node_id} in island {peer_info.island_id}")
    return True

def unregister_peer(self, node_id: str) -> bool:
    """Remove a peer from the registry and its island's peer set.

    Returns False when the peer is unknown, True otherwise. Refreshes the
    matching known-hub entry's peer_count when applicable.
    """
    peer_info = self.peer_registry.get(node_id)
    if peer_info is None:
        return False

    # Drop island membership first, then the registry entry.
    if peer_info.island_id in self.island_peers:
        self.island_peers[peer_info.island_id].discard(node_id)
    del self.peer_registry[node_id]

    # Keep the hub's advertised peer count in sync.
    hub = self.known_hubs.get(node_id)
    if hub is not None:
        hub.peer_count = len(self.island_peers.get(hub.island_id, set()))

    logger.debug(f"Unregistered peer {node_id}")
    return True
|
||||
|
||||
def add_known_hub(self, hub_info: HubInfo):
    """Add (or overwrite) a hub entry in the known-hub registry."""
    self.known_hubs[hub_info.node_id] = hub_info
    logger.info(f"Added known hub {hub_info.node_id} for island {hub_info.island_id}")

def remove_known_hub(self, node_id: str) -> bool:
    """Drop a hub from the known-hub registry.

    Returns False when the hub was not present, True otherwise.
    """
    if node_id not in self.known_hubs:
        return False
    del self.known_hubs[node_id]
    logger.info(f"Removed known hub {node_id}")
    return True
|
||||
|
||||
def get_peer_list(self, island_id: str) -> List[PeerInfo]:
    """Return every registered peer belonging to *island_id*."""
    return [p for p in self.peer_registry.values() if p.island_id == island_id]

def get_hub_list(self, island_id: Optional[str] = None) -> List[HubInfo]:
    """Return known hubs, optionally restricted to a single island."""
    return [
        h for h in self.known_hubs.values()
        if island_id is None or h.island_id == island_id
    ]

def get_island_peers(self, island_id: str) -> Set[str]:
    """Return a copy of the node-id set for an island (empty when unknown)."""
    return self.island_peers.get(island_id, set()).copy()

def get_peer_count(self, island_id: str) -> int:
    """Return how many peers are tracked for an island."""
    return len(self.island_peers.get(island_id, set()))

def get_hub_info(self, node_id: str) -> Optional[HubInfo]:
    """Return the HubInfo for *node_id*, or None when unknown."""
    return self.known_hubs.get(node_id)

def get_peer_info(self, node_id: str) -> Optional[PeerInfo]:
    """Return the PeerInfo for *node_id*, or None when unknown."""
    return self.peer_registry.get(node_id)

def update_peer_last_seen(self, node_id: str):
    """Refresh the last-seen timestamp for a peer and, when applicable,
    its known-hub entry."""
    now = time.time()
    if node_id in self.peer_registry:
        self.peer_registry[node_id].last_seen = now
    if node_id in self.known_hubs:
        self.known_hubs[node_id].last_seen = now
|
||||
|
||||
async def start(self):
    """Run the hub manager's background maintenance tasks.

    Blocks until the health-check and peer-cleanup loops finish (normally
    only when `running` is cleared); guarantees `running` is reset on exit.
    """
    self.running = True
    logger.info(f"Starting hub manager for node {self.local_node_id}")

    # Background maintenance loops.
    background = (
        asyncio.create_task(self._hub_health_check()),
        asyncio.create_task(self._peer_cleanup()),
    )

    try:
        await asyncio.gather(*background)
    except Exception as e:
        logger.error(f"Hub manager error: {e}")
    finally:
        self.running = False

async def stop(self):
    """Signal the background loops to stop."""
    self.running = False
    logger.info("Stopping hub manager")
|
||||
|
||||
async def _hub_health_check(self):
    """Periodically prune hubs that have gone silent.

    A hub unseen for more than 10 minutes is considered offline and is
    removed — except our own entry, which only unregister_as_hub() drops.
    Runs every 60s; backs off 10s after an unexpected error.
    """
    while self.running:
        try:
            now = time.time()

            # Collect hubs not seen within the last 10 minutes.
            offline = [
                node_id
                for node_id, hub in self.known_hubs.items()
                if now - hub.last_seen > 600
            ]
            for node_id in offline:
                logger.warning(f"Hub {node_id} appears to be offline")

            # Prune them, but never our own entry.
            for node_id in offline:
                if node_id != self.local_node_id:
                    self.remove_known_hub(node_id)

            await asyncio.sleep(60)  # Check every minute

        except Exception as e:
            logger.error(f"Hub health check error: {e}")
            await asyncio.sleep(10)

async def _peer_cleanup(self):
    """Periodically drop peers that have gone silent.

    Peers unseen for more than 5 minutes are unregistered. Runs every
    60s; backs off 10s after an unexpected error.
    """
    while self.running:
        try:
            now = time.time()

            # Collect peers not seen within the last 5 minutes.
            stale = [
                node_id
                for node_id, peer in self.peer_registry.items()
                if now - peer.last_seen > 300
            ]

            for node_id in stale:
                self.unregister_peer(node_id)
                logger.debug(f"Removed stale peer {node_id}")

            await asyncio.sleep(60)  # Check every minute

        except Exception as e:
            logger.error(f"Peer cleanup error: {e}")
            await asyncio.sleep(10)
|
||||
|
||||
|
||||
# Global hub manager instance (module-level singleton)
hub_manager_instance: Optional[HubManager] = None


def get_hub_manager() -> Optional[HubManager]:
    """Return the module-level HubManager singleton, or None if unset."""
    return hub_manager_instance
|
||||
|
||||
|
||||
def create_hub_manager(node_id: str, address: str, port: int, island_id: str, island_name: str) -> HubManager:
    """Build a HubManager, install it as the module-level singleton, and
    return it. Any previously installed instance is replaced."""
    global hub_manager_instance
    manager = HubManager(node_id, address, port, island_id, island_name)
    hub_manager_instance = manager
    return manager
|
||||
|
||||
@@ -7,8 +7,10 @@ from psycopg2.extras import RealDictCursor
|
||||
import json
|
||||
from decimal import Decimal
|
||||
|
||||
from aitbc.constants import DATA_DIR
|
||||
|
||||
# Database configurations
|
||||
SQLITE_DB = "/var/lib/aitbc/data/coordinator.db"
|
||||
SQLITE_DB = str(DATA_DIR / "data/coordinator.db")
|
||||
PG_CONFIG = {
|
||||
"host": "localhost",
|
||||
"database": "aitbc_coordinator",
|
||||
|
||||
@@ -12,7 +12,7 @@ from decimal import Decimal
|
||||
from enum import StrEnum
|
||||
from typing import Any
|
||||
|
||||
from aitbc import get_logger
|
||||
from aitbc import get_logger, derive_ethereum_address, sign_transaction_hash, verify_signature, encrypt_private_key, Web3Client
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
@@ -174,6 +174,8 @@ class EthereumWalletAdapter(EnhancedWalletAdapter):
|
||||
def __init__(self, chain_id: int, rpc_url: str, security_level: SecurityLevel = SecurityLevel.MEDIUM):
|
||||
super().__init__(chain_id, ChainType.ETHEREUM, rpc_url, security_level)
|
||||
self.chain_id = chain_id
|
||||
# Initialize Web3 client for blockchain operations
|
||||
self._web3_client = Web3Client(rpc_url)
|
||||
|
||||
async def create_wallet(self, owner_address: str, security_config: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Create a new Ethereum wallet with enhanced security"""
|
||||
@@ -446,25 +448,36 @@ class EthereumWalletAdapter(EnhancedWalletAdapter):
|
||||
# Private helper methods
|
||||
async def _derive_address_from_private_key(self, private_key: str) -> str:
|
||||
"""Derive Ethereum address from private key"""
|
||||
# This would use actual Ethereum cryptography
|
||||
# For now, return a mock address
|
||||
return f"0x{hashlib.sha256(private_key.encode()).hexdigest()[:40]}"
|
||||
try:
|
||||
return derive_ethereum_address(private_key)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to derive address from private key: {e}")
|
||||
raise
|
||||
|
||||
async def _encrypt_private_key(self, private_key: str, security_config: dict[str, Any]) -> str:
|
||||
"""Encrypt private key with security configuration"""
|
||||
# This would use actual encryption
|
||||
# For now, return mock encrypted key
|
||||
return f"encrypted_{hashlib.sha256(private_key.encode()).hexdigest()}"
|
||||
try:
|
||||
password = security_config.get("encryption_password", "default_password")
|
||||
return encrypt_private_key(private_key, password)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to encrypt private key: {e}")
|
||||
raise
|
||||
|
||||
async def _get_eth_balance(self, address: str) -> str:
|
||||
"""Get ETH balance in wei"""
|
||||
# Mock implementation
|
||||
return "1000000000000000000" # 1 ETH in wei
|
||||
try:
|
||||
return self._web3_client.get_eth_balance(address)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get ETH balance: {e}")
|
||||
raise
|
||||
|
||||
async def _get_token_balance(self, address: str, token_address: str) -> dict[str, Any]:
|
||||
"""Get ERC-20 token balance"""
|
||||
# Mock implementation
|
||||
return {"balance": "100000000000000000000", "decimals": 18, "symbol": "TOKEN"} # 100 tokens
|
||||
try:
|
||||
return self._web3_client.get_token_balance(address, token_address)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get token balance: {e}")
|
||||
raise
|
||||
|
||||
async def _create_erc20_transfer(
|
||||
self, from_address: str, to_address: str, token_address: str, amount: int
|
||||
@@ -493,78 +506,116 @@ class EthereumWalletAdapter(EnhancedWalletAdapter):
|
||||
|
||||
async def _get_gas_price(self) -> int:
|
||||
"""Get current gas price"""
|
||||
# Mock implementation
|
||||
return 20000000000 # 20 Gwei in wei
|
||||
try:
|
||||
return self._web3_client.get_gas_price()
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get gas price: {e}")
|
||||
raise
|
||||
|
||||
async def _get_gas_price_gwei(self) -> float:
|
||||
"""Get current gas price in Gwei"""
|
||||
gas_price_wei = await self._get_gas_price()
|
||||
return gas_price_wei / 10**9
|
||||
try:
|
||||
return self._web3_client.get_gas_price_gwei()
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get gas price in Gwei: {e}")
|
||||
raise
|
||||
|
||||
async def _get_nonce(self, address: str) -> int:
|
||||
"""Get transaction nonce for address"""
|
||||
# Mock implementation
|
||||
return 0
|
||||
try:
|
||||
return self._web3_client.get_nonce(address)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get nonce: {e}")
|
||||
raise
|
||||
|
||||
async def _sign_transaction(self, transaction_data: dict[str, Any], from_address: str) -> str:
|
||||
"""Sign transaction"""
|
||||
# Mock implementation
|
||||
return f"0xsigned_{hashlib.sha256(str(transaction_data).encode()).hexdigest()}"
|
||||
try:
|
||||
# Get the transaction hash
|
||||
from eth_account import Account
|
||||
# Remove 0x prefix if present
|
||||
if from_address.startswith("0x"):
|
||||
from_address = from_address[2:]
|
||||
|
||||
account = Account.from_key(from_address)
|
||||
|
||||
# Build transaction dict for signing
|
||||
tx_dict = {
|
||||
'nonce': int(transaction_data.get('nonce', 0), 16),
|
||||
'gasPrice': int(transaction_data.get('gasPrice', 0), 16),
|
||||
'gas': int(transaction_data.get('gas', 0), 16),
|
||||
'to': transaction_data.get('to'),
|
||||
'value': int(transaction_data.get('value', '0x0'), 16),
|
||||
'data': transaction_data.get('data', '0x'),
|
||||
'chainId': transaction_data.get('chainId', 1)
|
||||
}
|
||||
|
||||
signed_tx = account.sign_transaction(tx_dict)
|
||||
return signed_tx.raw_transaction.hex()
|
||||
except ImportError:
|
||||
raise ImportError("eth-account is required for transaction signing. Install with: pip install eth-account")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to sign transaction: {e}")
|
||||
raise
|
||||
|
||||
async def _send_raw_transaction(self, signed_transaction: str) -> str:
|
||||
"""Send raw transaction"""
|
||||
# Mock implementation
|
||||
return f"0x{hashlib.sha256(signed_transaction.encode()).hexdigest()}"
|
||||
try:
|
||||
return self._web3_client.send_raw_transaction(signed_transaction)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to send raw transaction: {e}")
|
||||
raise
|
||||
|
||||
async def _get_transaction_receipt(self, tx_hash: str) -> dict[str, Any] | None:
|
||||
"""Get transaction receipt"""
|
||||
# Mock implementation
|
||||
return {
|
||||
"status": 1,
|
||||
"blockNumber": "0x12345",
|
||||
"blockHash": "0xabcdef",
|
||||
"gasUsed": "0x5208",
|
||||
"effectiveGasPrice": "0x4a817c800",
|
||||
"logs": [],
|
||||
}
|
||||
try:
|
||||
return self._web3_client.get_transaction_receipt(tx_hash)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get transaction receipt: {e}")
|
||||
raise
|
||||
|
||||
async def _get_transaction_by_hash(self, tx_hash: str) -> dict[str, Any]:
|
||||
"""Get transaction by hash"""
|
||||
# Mock implementation
|
||||
return {"from": "0xsender", "to": "0xreceiver", "value": "0xde0b6b3a7640000", "data": "0x"} # 1 ETH in wei
|
||||
try:
|
||||
return self._web3_client.get_transaction_by_hash(tx_hash)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get transaction by hash: {e}")
|
||||
raise
|
||||
|
||||
async def _estimate_gas_call(self, call_data: dict[str, Any]) -> str:
|
||||
"""Estimate gas for call"""
|
||||
# Mock implementation
|
||||
return "0x5208" # 21000 in hex
|
||||
try:
|
||||
gas_estimate = self._web3_client.estimate_gas(call_data)
|
||||
return hex(gas_estimate)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to estimate gas: {e}")
|
||||
raise
|
||||
|
||||
async def _get_wallet_transactions(
|
||||
self, address: str, limit: int, offset: int, from_block: int | None, to_block: int | None
|
||||
) -> list[dict[str, Any]]:
|
||||
"""Get wallet transactions"""
|
||||
# Mock implementation
|
||||
return [
|
||||
{
|
||||
"hash": f"0x{hashlib.sha256(f'tx_{i}'.encode()).hexdigest()}",
|
||||
"from": address,
|
||||
"to": f"0x{hashlib.sha256(f'to_{i}'.encode()).hexdigest()[:40]}",
|
||||
"value": "0xde0b6b3a7640000",
|
||||
"blockNumber": f"0x{12345 + i}",
|
||||
"timestamp": datetime.utcnow().timestamp(),
|
||||
"gasUsed": "0x5208",
|
||||
}
|
||||
for i in range(min(limit, 10))
|
||||
]
|
||||
try:
|
||||
return self._web3_client.get_wallet_transactions(address, limit)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get wallet transactions: {e}")
|
||||
raise
|
||||
|
||||
async def _sign_hash(self, message_hash: str, private_key: str) -> str:
|
||||
"""Sign a hash with private key"""
|
||||
# Mock implementation
|
||||
return f"0x{hashlib.sha256(f'{message_hash}{private_key}'.encode()).hexdigest()}"
|
||||
try:
|
||||
return sign_transaction_hash(message_hash, private_key)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to sign hash: {e}")
|
||||
raise
|
||||
|
||||
async def _verify_signature(self, message_hash: str, signature: str, address: str) -> bool:
|
||||
"""Verify a signature"""
|
||||
# Mock implementation
|
||||
return True
|
||||
try:
|
||||
return verify_signature(message_hash, signature, address)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to verify signature: {e}")
|
||||
return False
|
||||
|
||||
|
||||
class PolygonWalletAdapter(EthereumWalletAdapter):
|
||||
|
||||
@@ -7,7 +7,6 @@ Multi-chain trading with cross-chain swaps and bridging
|
||||
import sqlite3
|
||||
import json
|
||||
import asyncio
|
||||
import httpx
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, List, Optional, Any
|
||||
from fastapi import FastAPI, HTTPException, Query, BackgroundTasks
|
||||
@@ -17,8 +16,15 @@ import os
|
||||
import uuid
|
||||
import hashlib
|
||||
|
||||
from aitbc.http_client import AsyncAITBCHTTPClient
|
||||
from aitbc.aitbc_logging import get_logger
|
||||
from aitbc.exceptions import NetworkError
|
||||
|
||||
app = FastAPI(title="AITBC Complete Cross-Chain Exchange", version="3.0.0")
|
||||
|
||||
# Initialize logger
|
||||
logger = get_logger(__name__)
|
||||
|
||||
# Database configuration
|
||||
DB_PATH = os.path.join(os.path.dirname(__file__), "exchange_multichain.db")
|
||||
|
||||
@@ -368,10 +374,10 @@ async def health_check():
|
||||
|
||||
if chain_info["status"] == "active" and chain_info["blockchain_url"]:
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(f"{chain_info['blockchain_url']}/health", timeout=5.0)
|
||||
chain_status[chain_id]["connected"] = response.status_code == 200
|
||||
except:
|
||||
client = AsyncAITBCHTTPClient(base_url=chain_info['blockchain_url'], timeout=5)
|
||||
response = await client.async_get("/health")
|
||||
chain_status[chain_id]["connected"] = response is not None
|
||||
except NetworkError:
|
||||
pass
|
||||
|
||||
return {
|
||||
|
||||
@@ -7,7 +7,6 @@ Complete multi-chain trading with chain isolation
|
||||
import sqlite3
|
||||
import json
|
||||
import asyncio
|
||||
import httpx
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, List, Optional, Any
|
||||
from fastapi import FastAPI, HTTPException, Query, BackgroundTasks
|
||||
@@ -15,8 +14,15 @@ from pydantic import BaseModel, Field
|
||||
import uvicorn
|
||||
import os
|
||||
|
||||
from aitbc.http_client import AsyncAITBCHTTPClient
|
||||
from aitbc.aitbc_logging import get_logger
|
||||
from aitbc.exceptions import NetworkError
|
||||
|
||||
app = FastAPI(title="AITBC Multi-Chain Exchange", version="2.0.0")
|
||||
|
||||
# Initialize logger
|
||||
logger = get_logger(__name__)
|
||||
|
||||
# Database configuration
|
||||
DB_PATH = os.path.join(os.path.dirname(__file__), "exchange_multichain.db")
|
||||
|
||||
@@ -145,10 +151,10 @@ async def verify_chain_transaction(chain_id: str, tx_hash: str) -> bool:
|
||||
return False
|
||||
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(f"{chain_info['blockchain_url']}/api/v1/transactions/{tx_hash}")
|
||||
return response.status_code == 200
|
||||
except:
|
||||
client = AsyncAITBCHTTPClient(base_url=chain_info['blockchain_url'], timeout=5)
|
||||
response = await client.async_get(f"/api/v1/transactions/{tx_hash}")
|
||||
return response is not None
|
||||
except NetworkError:
|
||||
return False
|
||||
|
||||
async def submit_chain_transaction(chain_id: str, order_data: Dict) -> Optional[str]:
|
||||
@@ -161,16 +167,13 @@ async def submit_chain_transaction(chain_id: str, order_data: Dict) -> Optional[
|
||||
return None
|
||||
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.post(
|
||||
f"{chain_info['blockchain_url']}/api/v1/transactions",
|
||||
json=order_data
|
||||
)
|
||||
if response.status_code == 200:
|
||||
return response.json().get("tx_hash")
|
||||
except Exception as e:
|
||||
print(f"Chain transaction error: {e}")
|
||||
|
||||
client = AsyncAITBCHTTPClient(base_url=chain_info['blockchain_url'], timeout=10)
|
||||
response = await client.async_post("/api/v1/transactions", json=order_data)
|
||||
if response:
|
||||
return response.get("tx_hash")
|
||||
except NetworkError as e:
|
||||
logger.error(f"Chain transaction error: {e}")
|
||||
|
||||
return None
|
||||
|
||||
# API Endpoints
|
||||
@@ -188,10 +191,10 @@ async def health_check():
|
||||
|
||||
if chain_info["status"] == "active" and chain_info["blockchain_url"]:
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(f"{chain_info['blockchain_url']}/health", timeout=5.0)
|
||||
chain_status[chain_id]["connected"] = response.status_code == 200
|
||||
except:
|
||||
client = AsyncAITBCHTTPClient(base_url=chain_info['blockchain_url'], timeout=5)
|
||||
response = await client.async_get("/health")
|
||||
chain_status[chain_id]["connected"] = response is not None
|
||||
except NetworkError:
|
||||
pass
|
||||
|
||||
return {
|
||||
|
||||
@@ -4,7 +4,6 @@ Simple AITBC Blockchain Explorer - Demonstrating the issues described in the ana
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import httpx
|
||||
import re
|
||||
from datetime import datetime
|
||||
from typing import Dict, Any, Optional
|
||||
@@ -12,8 +11,15 @@ from fastapi import FastAPI, HTTPException
|
||||
from fastapi.responses import HTMLResponse
|
||||
import uvicorn
|
||||
|
||||
from aitbc.http_client import AsyncAITBCHTTPClient
|
||||
from aitbc.aitbc_logging import get_logger
|
||||
from aitbc.exceptions import NetworkError
|
||||
|
||||
app = FastAPI(title="Simple AITBC Explorer", version="0.1.0")
|
||||
|
||||
# Initialize logger
|
||||
logger = get_logger(__name__)
|
||||
|
||||
# Configuration
|
||||
BLOCKCHAIN_RPC_URL = "http://localhost:8025"
|
||||
|
||||
@@ -174,12 +180,12 @@ HTML_TEMPLATE = """
|
||||
async def get_chain_head():
|
||||
"""Get current chain head"""
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(f"{BLOCKCHAIN_RPC_URL}/rpc/head")
|
||||
if response.status_code == 200:
|
||||
return response.json()
|
||||
except Exception as e:
|
||||
print(f"Error getting chain head: {e}")
|
||||
client = AsyncAITBCHTTPClient(base_url=BLOCKCHAIN_RPC_URL, timeout=10)
|
||||
response = await client.async_get("/rpc/head")
|
||||
if response:
|
||||
return response
|
||||
except NetworkError as e:
|
||||
logger.error(f"Error getting chain head: {e}")
|
||||
return {"height": 0, "hash": "", "timestamp": None}
|
||||
|
||||
@app.get("/api/blocks/{height}")
|
||||
@@ -189,12 +195,12 @@ async def get_block(height: int):
|
||||
if height < 0 or height > 10000000:
|
||||
return {"height": height, "hash": "", "timestamp": None, "transactions": []}
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(f"{BLOCKCHAIN_RPC_URL}/rpc/blocks/{height}")
|
||||
if response.status_code == 200:
|
||||
return response.json()
|
||||
except Exception as e:
|
||||
print(f"Error getting block {height}: {e}")
|
||||
client = AsyncAITBCHTTPClient(base_url=BLOCKCHAIN_RPC_URL, timeout=10)
|
||||
response = await client.async_get(f"/rpc/blocks/{height}")
|
||||
if response:
|
||||
return response
|
||||
except NetworkError as e:
|
||||
logger.error(f"Error getting block: {e}")
|
||||
return {"height": height, "hash": "", "timestamp": None, "transactions": []}
|
||||
|
||||
@app.get("/api/transactions/{tx_hash}")
|
||||
@@ -203,26 +209,21 @@ async def get_transaction(tx_hash: str):
|
||||
if not validate_tx_hash(tx_hash):
|
||||
return {"hash": tx_hash, "from": "unknown", "to": "unknown", "amount": 0, "timestamp": None}
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(f"{BLOCKCHAIN_RPC_URL}/rpc/tx/{tx_hash}")
|
||||
if response.status_code == 200:
|
||||
tx_data = response.json()
|
||||
# Problem 2: Map RPC schema to UI schema
|
||||
return {
|
||||
"hash": tx_data.get("tx_hash", tx_hash), # tx_hash -> hash
|
||||
"from": tx_data.get("sender", "unknown"), # sender -> from
|
||||
"to": tx_data.get("recipient", "unknown"), # recipient -> to
|
||||
"amount": tx_data.get("payload", {}).get("value", "0"), # payload.value -> amount
|
||||
"fee": tx_data.get("payload", {}).get("fee", "0"), # payload.fee -> fee
|
||||
"timestamp": tx_data.get("created_at"), # created_at -> timestamp
|
||||
"block_height": tx_data.get("block_height", "pending")
|
||||
}
|
||||
elif response.status_code == 404:
|
||||
raise HTTPException(status_code=404, detail="Transaction not found")
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
print(f"Error getting transaction {tx_hash}: {e}")
|
||||
client = AsyncAITBCHTTPClient(base_url=BLOCKCHAIN_RPC_URL, timeout=10)
|
||||
response = await client.async_get(f"/rpc/tx/{tx_hash}")
|
||||
if response:
|
||||
# Problem 2: Map RPC schema to UI schema
|
||||
return {
|
||||
"hash": response.get("tx_hash", tx_hash), # tx_hash -> hash
|
||||
"from": response.get("sender", "unknown"), # sender -> from
|
||||
"to": response.get("recipient", "unknown"), # recipient -> to
|
||||
"amount": response.get("payload", {}).get("value", "0"), # payload.value -> amount
|
||||
"fee": response.get("payload", {}).get("fee", "0"), # payload.fee -> fee
|
||||
"timestamp": response.get("created_at"), # created_at -> timestamp
|
||||
"block_height": response.get("block_height", "pending")
|
||||
}
|
||||
except NetworkError as e:
|
||||
logger.error(f"Error getting transaction {tx_hash}: {e}")
|
||||
raise HTTPException(status_code=500, detail=f"Failed to fetch transaction: {str(e)}")
|
||||
|
||||
# Missing: @app.get("/api/transactions/{tx_hash}") - THIS IS THE PROBLEM
|
||||
|
||||
@@ -16,6 +16,8 @@ from pathlib import Path
|
||||
import os
|
||||
import sys
|
||||
|
||||
from aitbc.constants import KEYSTORE_DIR
|
||||
|
||||
# Add CLI utils to path
|
||||
sys.path.insert(0, '/opt/aitbc/cli')
|
||||
|
||||
@@ -23,7 +25,7 @@ sys.path.insert(0, '/opt/aitbc/cli')
|
||||
app = FastAPI(title="AITBC Wallet Daemon", debug=False)
|
||||
|
||||
# Configuration
|
||||
KEYSTORE_PATH = Path("/var/lib/aitbc/keystore")
|
||||
KEYSTORE_PATH = KEYSTORE_DIR
|
||||
BLOCKCHAIN_RPC_URL = "http://localhost:8006"
|
||||
CHAIN_ID = "ait-mainnet"
|
||||
|
||||
|
||||
Reference in New Issue
Block a user