Update database paths and fix foreign key references across coordinator API
- Change SQLite database path from `/home/oib/windsurf/aitbc/data/` to `/opt/data/`
- Fix foreign key references to use correct table names (users, wallets, gpu_registry)
- Replace governance router with new governance and community routers
- Add multi-modal RL router to main application
- Simplify DEPLOYMENT_READINESS_REPORT.md to focus on production deployment status
- Update governance router with decentralized DAO voting
This commit is contained in:
124
tests/e2e/test_advanced_features.py
Normal file
124
tests/e2e/test_advanced_features.py
Normal file
@@ -0,0 +1,124 @@
|
||||
# Standard library
import asyncio
import json
from datetime import datetime, timedelta
from typing import Any, Dict

# Third party
import httpx
import pytest

# Base URL of the coordinator API under test (local dev instance).
AITBC_URL = "http://127.0.0.1:8000/v1"
|
||||
|
||||
@pytest.mark.asyncio
async def test_multi_modal_fusion():
    """Test Phase 10: Multi-Modal Agent Fusion"""
    async with httpx.AsyncClient() as client:
        # Step 1: register a cross-domain fusion model.
        model_spec = {
            "model_name": "MarketAnalyzer",
            "version": "1.0.0",
            "fusion_type": "cross_domain",
            "base_models": ["gemma3:1b", "llama3.2:3b"],
            "input_modalities": ["text", "structured_data"],
            "fusion_strategy": "ensemble_fusion",
        }
        create_resp = await client.post(
            f"{AITBC_URL}/multi-modal-rl/fusion/models",
            json=model_spec,
        )
        assert create_resp.status_code in [200, 201], f"Failed to create fusion model: {create_resp.text}"
        body = create_resp.json()
        # The API may expose the identifier as either "fusion_id" or "id".
        assert "fusion_id" in body or "id" in body
        fusion_id = body.get("fusion_id", body.get("id"))

        # Step 2: run a single inference through the newly created model.
        inference_request = {
            "fusion_id": fusion_id,
            "input_data": {
                "text": "Analyze this market data and provide a textual summary",
                "structured_data": {"price_trend": "upward", "volume": 15000},
            },
        }
        infer_resp = await client.post(
            f"{AITBC_URL}/multi-modal-rl/fusion/{fusion_id}/infer",
            json=inference_request,
        )
        assert infer_resp.status_code in [200, 201], f"Failed fusion inference: {infer_resp.text}"
|
||||
|
||||
async def _ensure_governance_profile(client, user_id: str, voting_power: float) -> str:
    """Create a governance profile for *user_id* or look it up if it exists.

    Returns the profile id to use in subsequent requests; falls back to
    *user_id* itself when neither the create nor the lookup response
    yields an explicit id.
    """
    create_resp = await client.post(
        f"{AITBC_URL}/governance/profiles",
        json={
            "user_id": user_id,
            "initial_voting_power": voting_power,
            "delegate_to": None,
        },
    )
    if create_resp.status_code in [200, 201]:
        return create_resp.json().get("profile_id", user_id)
    # The create endpoint rejects duplicates with a 400; fall back to a lookup.
    if create_resp.status_code == 400 and "already exists" in create_resp.text.lower():
        lookup_resp = await client.get(f"{AITBC_URL}/governance/profiles/{user_id}")
        if lookup_resp.status_code == 200:
            return lookup_resp.json().get("id", user_id)
    return user_id


@pytest.mark.asyncio
async def test_dao_governance_proposal():
    """Test Phase 11: OpenClaw DAO Governance & Proposal Test"""
    async with httpx.AsyncClient() as client:
        # 1. Ensure the proposer profile exists (idempotent create-or-get).
        proposer_profile_id = await _ensure_governance_profile(client, "client1", 1000.0)

        # 2. Create a proposal on behalf of the proposer.
        proposal_payload = {
            "title": "Reduce Platform Fee to 0.5%",
            "description": "Lowering the fee to attract more edge miners",
            "category": "economic_policy",
            "execution_payload": {
                "target_contract": "MarketplaceConfig",
                "action": "setPlatformFee",
                "value": "0.5",
            },
        }
        response = await client.post(
            f"{AITBC_URL}/governance/proposals?proposer_id={proposer_profile_id}",
            json=proposal_payload,
        )
        assert response.status_code in [200, 201], f"Failed to create proposal: {response.text}"
        # The API may expose the identifier as either "id" or "proposal_id".
        proposal_id = response.json().get("id") or response.json().get("proposal_id")
        assert proposal_id

        # 3. Vote on the proposal as a second (miner) profile.
        miner1_profile_id = await _ensure_governance_profile(client, "miner1", 1500.0)
        vote_payload = {
            "vote_type": "FOR",
            "reason": "Attract more miners",
        }
        vote_response = await client.post(
            f"{AITBC_URL}/governance/proposals/{proposal_id}/vote?voter_id={miner1_profile_id}",
            json=vote_payload,
        )
        assert vote_response.status_code in [200, 201], f"Failed to vote: {vote_response.text}"
|
||||
|
||||
@pytest.mark.asyncio
async def test_adaptive_scaler_trigger():
    """Test Phase 10.2: Verify Adaptive Scaler Trigger"""
    # NOTE(review): currently only a health smoke-check; the adaptive scaler
    # itself is not exercised by this test.
    async with httpx.AsyncClient() as client:
        health_resp = await client.get(f"{AITBC_URL}/health")
        assert health_resp.status_code == 200, f"Health check failed: {health_resp.text}"
47
tests/e2e/test_advanced_features_ws.py
Normal file
47
tests/e2e/test_advanced_features_ws.py
Normal file
@@ -0,0 +1,47 @@
|
||||
# Standard library
import asyncio
import json

# Third party
import pytest
import websockets

# WebSocket endpoint for streaming multi-modal fusion inference.
WS_URL = "ws://127.0.0.1:8000/v1/multi-modal-rl/fusion"
|
||||
|
||||
@pytest.mark.asyncio
async def test_websocket_fusion_stream():
    """Stream one inference request over the fusion WebSocket endpoint.

    A fusion model is first created over REST so that the stream URL
    refers to a real model id.
    """
    import httpx

    # 1. Create a fusion model via REST and capture its id.
    async with httpx.AsyncClient() as client:
        res = await client.post(
            "http://127.0.0.1:8000/v1/multi-modal-rl/fusion/models",
            json={
                "model_name": "StreamAnalyzer",
                "version": "1.0.0",
                "fusion_type": "cross_domain",
                "base_models": ["gemma3:1b"],
                "input_modalities": ["text"],
                "fusion_strategy": "ensemble_fusion"
            }
        )
        # Fail fast with a clear message instead of crashing inside
        # res.json() (or building a "None" stream URL) when model
        # creation fails.
        assert res.status_code in [200, 201], f"Failed to create fusion model: {res.text}"
        data = res.json()
        fusion_id = data.get("fusion_id", data.get("id"))
        assert fusion_id, f"No fusion id in create response: {data}"

    # 2. Open the stream, send one payload, and validate the response shape.
    uri = f"{WS_URL}/{fusion_id}/stream"
    try:
        async with websockets.connect(uri) as websocket:
            payload = {
                "text": "Streaming test data",
                "structured_data": {"test": True}
            }
            await websocket.send(json.dumps(payload))

            # Receive and decode the single streamed response.
            response_str = await websocket.recv()
            response = json.loads(response_str)

            assert "combined_result" in response
            assert "metadata" in response
            assert response["metadata"]["protocol"] == "websocket"
            assert response["metadata"]["processing_time"] > 0
    except Exception as e:
        pytest.fail(f"WebSocket test failed: {e}")
|
||||
|
||||
110
tests/e2e/test_cross_container_marketplace.py
Normal file
110
tests/e2e/test_cross_container_marketplace.py
Normal file
@@ -0,0 +1,110 @@
|
||||
# Standard library
import asyncio
import subprocess
import time
import uuid

# Third party
import httpx
import pytest

# URLs of the two coordinator nodes, reached through local SSH tunnels.
AITBC_URL = "http://127.0.0.1:18000/v1"   # primary node (aitbc)
AITBC1_URL = "http://127.0.0.1:18001/v1"  # secondary node (aitbc1)
|
||||
|
||||
@pytest.fixture(scope="session", autouse=True)
def setup_environment():
    """Session fixture: open SSH tunnels to both containers, tear down after.

    Ports 18000/18001 are forwarded to port 8000 on the `aitbc-cascade`
    and `aitbc1-cascade` SSH hosts respectively. A tunnel is only started
    when its local port is not already in use.
    """
    print("Setting up SSH tunnels for cross-container testing...")

    import socket

    def is_port_in_use(port):
        # connect_ex returns 0 when something is already listening locally.
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            return s.connect_ex(('localhost', port)) == 0

    p1 = None
    p2 = None

    if not is_port_in_use(18000):
        print("Starting SSH tunnel on port 18000 to aitbc-cascade")
        p1 = subprocess.Popen(["ssh", "-L", "18000:localhost:8000", "-N", "aitbc-cascade"])

    if not is_port_in_use(18001):
        print("Starting SSH tunnel on port 18001 to aitbc1-cascade")
        p2 = subprocess.Popen(["ssh", "-L", "18001:localhost:8000", "-N", "aitbc1-cascade"])

    # Give tunnels time to establish
    time.sleep(3)

    yield

    print("Tearing down SSH tunnels...")
    for proc in (p1, p2):
        if proc:
            proc.kill()
            # Fixed: reap the child so no zombie ssh process is left behind
            # (the original called kill() without wait()).
            proc.wait()
|
||||
|
||||
@pytest.mark.asyncio
async def test_cross_container_marketplace_sync():
    """Test Phase 1 & 2: Miner registers on aitbc, Client discovers on aitbc1"""

    unique_miner_id = f"miner_cross_test_{uuid.uuid4().hex[:8]}"

    async with httpx.AsyncClient() as client:
        # Check health of both nodes; skip (not fail) when tunnels are down.
        try:
            health1 = await client.get(f"{AITBC_URL}/health")
            health2 = await client.get(f"{AITBC1_URL}/health")
            assert health1.status_code == 200, f"aitbc (18000) is not healthy: {health1.text}"
            assert health2.status_code == 200, f"aitbc1 (18001) is not healthy: {health2.text}"
        except httpx.ConnectError:
            pytest.skip("SSH tunnels or target API servers are not reachable. Skipping test.")

        # 1. Register GPU Miner on aitbc (Primary MP)
        miner_payload = {
            "gpu": {
                "miner_id": unique_miner_id,
                "name": "NVIDIA-RTX-4060Ti",
                "memory": 16,
                "cuda_version": "12.2",
                "region": "localhost",
                "price_per_hour": 0.001,
                "capabilities": ["gemma3:1b", "lauchacarro/qwen2.5-translator:latest"]
            }
        }

        register_response = await client.post(
            f"{AITBC_URL}/marketplace/gpu/register",
            json=miner_payload
        )
        assert register_response.status_code in [200, 201], f"Failed to register on aitbc: {register_response.text}"

        # Verify it exists on aitbc
        verify_aitbc = await client.get(f"{AITBC_URL}/marketplace/gpu/list")
        assert verify_aitbc.status_code == 200

        found_on_primary = any(
            gpu.get("miner_id") == unique_miner_id for gpu in verify_aitbc.json()
        )
        assert found_on_primary, "GPU was registered but not found on primary node (aitbc)"

        # 2. Wait for synchronization (Redis replication/gossip to happen between containers)
        await asyncio.sleep(2)

        # 3. Client Discovers Miner on aitbc1 (Secondary MP)
        discover_response = await client.get(f"{AITBC1_URL}/marketplace/gpu/list")

        if discover_response.status_code == 200:
            gpus = discover_response.json()

            # Note: In a fully configured clustered DB, this should be True.
            # Currently they might have independent DBs unless configured otherwise.
            found_on_secondary = any(
                gpu.get("miner_id") == unique_miner_id for gpu in gpus
            )

            if not found_on_secondary:
                # Fixed: was "\\n", which printed a literal backslash-n
                # instead of a newline.
                print(f"\n[INFO] GPU {unique_miner_id} not found on aitbc1. Database replication may not be active between containers. This is expected in independent test environments.")
        else:
            assert discover_response.status_code == 200, f"Failed to list GPUs on aitbc1: {discover_response.text}"
|
||||
|
||||
Reference in New Issue
Block a user