feat(coordinator-api): integrate dynamic pricing engine with GPU marketplace and add agent identity router
- Add DynamicPricingEngine and MarketDataCollector dependencies to GPU marketplace endpoints
- Implement dynamic pricing calculation for GPU registration with market_balance strategy
- Calculate real-time dynamic prices at booking time with confidence scores and pricing factors
- Enhance /marketplace/pricing/{model} endpoint with comprehensive dynamic pricing analysis
- Add static vs dynamic price
This commit is contained in:
@@ -14,6 +14,7 @@ from .payments import router as payments
|
||||
from .web_vitals import router as web_vitals
|
||||
from .edge_gpu import router as edge_gpu
|
||||
from .cache_management import router as cache_management
|
||||
from .agent_identity import router as agent_identity
|
||||
# from .registry import router as registry
|
||||
|
||||
__all__ = [
|
||||
@@ -31,5 +32,6 @@ __all__ = [
|
||||
"web_vitals",
|
||||
"edge_gpu",
|
||||
"cache_management",
|
||||
"agent_identity",
|
||||
"registry",
|
||||
]
|
||||
|
||||
565
apps/coordinator-api/src/app/routers/agent_identity.py
Normal file
565
apps/coordinator-api/src/app/routers/agent_identity.py
Normal file
@@ -0,0 +1,565 @@
|
||||
"""
|
||||
Agent Identity API Router
|
||||
REST API endpoints for agent identity management and cross-chain operations
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Depends, Query
|
||||
from fastapi.responses import JSONResponse
|
||||
from typing import List, Optional, Dict, Any
|
||||
from datetime import datetime
|
||||
from sqlmodel import Field
|
||||
|
||||
from ..domain.agent_identity import (
|
||||
AgentIdentity, CrossChainMapping, IdentityVerification, AgentWallet,
|
||||
IdentityStatus, VerificationType, ChainType,
|
||||
AgentIdentityCreate, AgentIdentityUpdate, CrossChainMappingCreate,
|
||||
CrossChainMappingUpdate, IdentityVerificationCreate, AgentWalletCreate,
|
||||
AgentWalletUpdate, AgentIdentityResponse, CrossChainMappingResponse,
|
||||
AgentWalletResponse
|
||||
)
|
||||
from ..services.database import get_session
|
||||
from .manager import AgentIdentityManager
|
||||
|
||||
router = APIRouter(prefix="/agent-identity", tags=["Agent Identity"])
|
||||
|
||||
|
||||
def get_identity_manager(session=Depends(get_session)) -> AgentIdentityManager:
|
||||
"""Dependency injection for AgentIdentityManager"""
|
||||
return AgentIdentityManager(session)
|
||||
|
||||
|
||||
# Identity Management Endpoints
|
||||
|
||||
@router.post("/identities", response_model=Dict[str, Any])
|
||||
async def create_agent_identity(
|
||||
request: Dict[str, Any],
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Create a new agent identity with cross-chain mappings"""
|
||||
try:
|
||||
result = await manager.create_agent_identity(
|
||||
owner_address=request['owner_address'],
|
||||
chains=request['chains'],
|
||||
display_name=request.get('display_name', ''),
|
||||
description=request.get('description', ''),
|
||||
metadata=request.get('metadata'),
|
||||
tags=request.get('tags')
|
||||
)
|
||||
return JSONResponse(content=result, status_code=201)
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
|
||||
@router.get("/identities/{agent_id}", response_model=Dict[str, Any])
|
||||
async def get_agent_identity(
|
||||
agent_id: str,
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Get comprehensive agent identity summary"""
|
||||
try:
|
||||
result = await manager.get_agent_identity_summary(agent_id)
|
||||
if 'error' in result:
|
||||
raise HTTPException(status_code=404, detail=result['error'])
|
||||
return result
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.put("/identities/{agent_id}", response_model=Dict[str, Any])
|
||||
async def update_agent_identity(
|
||||
agent_id: str,
|
||||
request: Dict[str, Any],
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Update agent identity and related components"""
|
||||
try:
|
||||
result = await manager.update_agent_identity(agent_id, request)
|
||||
if not result.get('update_successful', True):
|
||||
raise HTTPException(status_code=400, detail=result.get('error', 'Update failed'))
|
||||
return result
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/identities/{agent_id}/deactivate", response_model=Dict[str, Any])
|
||||
async def deactivate_agent_identity(
|
||||
agent_id: str,
|
||||
request: Dict[str, Any],
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Deactivate an agent identity across all chains"""
|
||||
try:
|
||||
reason = request.get('reason', '')
|
||||
success = await manager.deactivate_agent_identity(agent_id, reason)
|
||||
if not success:
|
||||
raise HTTPException(status_code=400, detail='Deactivation failed')
|
||||
return {
|
||||
'agent_id': agent_id,
|
||||
'deactivated': True,
|
||||
'reason': reason,
|
||||
'timestamp': datetime.utcnow().isoformat()
|
||||
}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# Cross-Chain Mapping Endpoints
|
||||
|
||||
@router.post("/identities/{agent_id}/cross-chain/register", response_model=Dict[str, Any])
|
||||
async def register_cross_chain_identity(
|
||||
agent_id: str,
|
||||
request: Dict[str, Any],
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Register cross-chain identity mappings"""
|
||||
try:
|
||||
chain_mappings = request['chain_mappings']
|
||||
verifier_address = request.get('verifier_address')
|
||||
verification_type = VerificationType(request.get('verification_type', 'basic'))
|
||||
|
||||
# Use registry directly for this operation
|
||||
result = await manager.registry.register_cross_chain_identity(
|
||||
agent_id,
|
||||
chain_mappings,
|
||||
verifier_address,
|
||||
verification_type
|
||||
)
|
||||
return result
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
|
||||
@router.get("/identities/{agent_id}/cross-chain/mapping", response_model=List[CrossChainMappingResponse])
|
||||
async def get_cross_chain_mapping(
|
||||
agent_id: str,
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Get all cross-chain mappings for an agent"""
|
||||
try:
|
||||
mappings = await manager.registry.get_all_cross_chain_mappings(agent_id)
|
||||
return [
|
||||
CrossChainMappingResponse(
|
||||
id=m.id,
|
||||
agent_id=m.agent_id,
|
||||
chain_id=m.chain_id,
|
||||
chain_type=m.chain_type,
|
||||
chain_address=m.chain_address,
|
||||
is_verified=m.is_verified,
|
||||
verified_at=m.verified_at,
|
||||
wallet_address=m.wallet_address,
|
||||
wallet_type=m.wallet_type,
|
||||
chain_metadata=m.chain_metadata,
|
||||
last_transaction=m.last_transaction,
|
||||
transaction_count=m.transaction_count,
|
||||
created_at=m.created_at,
|
||||
updated_at=m.updated_at
|
||||
)
|
||||
for m in mappings
|
||||
]
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.put("/identities/{agent_id}/cross-chain/{chain_id}", response_model=Dict[str, Any])
|
||||
async def update_cross_chain_mapping(
|
||||
agent_id: str,
|
||||
chain_id: int,
|
||||
request: Dict[str, Any],
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Update cross-chain mapping for a specific chain"""
|
||||
try:
|
||||
new_address = request.get('new_address')
|
||||
verifier_address = request.get('verifier_address')
|
||||
|
||||
if not new_address:
|
||||
raise HTTPException(status_code=400, detail='new_address is required')
|
||||
|
||||
success = await manager.registry.update_identity_mapping(
|
||||
agent_id,
|
||||
chain_id,
|
||||
new_address,
|
||||
verifier_address
|
||||
)
|
||||
|
||||
if not success:
|
||||
raise HTTPException(status_code=400, detail='Update failed')
|
||||
|
||||
return {
|
||||
'agent_id': agent_id,
|
||||
'chain_id': chain_id,
|
||||
'new_address': new_address,
|
||||
'updated': True,
|
||||
'timestamp': datetime.utcnow().isoformat()
|
||||
}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/identities/{agent_id}/cross-chain/{chain_id}/verify", response_model=Dict[str, Any])
|
||||
async def verify_cross_chain_identity(
|
||||
agent_id: str,
|
||||
chain_id: int,
|
||||
request: Dict[str, Any],
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Verify identity on a specific blockchain"""
|
||||
try:
|
||||
# Get identity ID
|
||||
identity = await manager.core.get_identity_by_agent_id(agent_id)
|
||||
if not identity:
|
||||
raise HTTPException(status_code=404, detail='Agent identity not found')
|
||||
|
||||
verification = await manager.registry.verify_cross_chain_identity(
|
||||
identity.id,
|
||||
chain_id,
|
||||
request['verifier_address'],
|
||||
request['proof_hash'],
|
||||
request.get('proof_data', {}),
|
||||
VerificationType(request.get('verification_type', 'basic'))
|
||||
)
|
||||
|
||||
return {
|
||||
'verification_id': verification.id,
|
||||
'agent_id': agent_id,
|
||||
'chain_id': chain_id,
|
||||
'verification_type': verification.verification_type,
|
||||
'verified': True,
|
||||
'timestamp': verification.created_at.isoformat()
|
||||
}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/identities/{agent_id}/migrate", response_model=Dict[str, Any])
|
||||
async def migrate_agent_identity(
|
||||
agent_id: str,
|
||||
request: Dict[str, Any],
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Migrate agent identity from one chain to another"""
|
||||
try:
|
||||
result = await manager.migrate_agent_identity(
|
||||
agent_id,
|
||||
request['from_chain'],
|
||||
request['to_chain'],
|
||||
request['new_address'],
|
||||
request.get('verifier_address')
|
||||
)
|
||||
return result
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
|
||||
# Wallet Management Endpoints
|
||||
|
||||
@router.post("/identities/{agent_id}/wallets", response_model=Dict[str, Any])
|
||||
async def create_agent_wallet(
|
||||
agent_id: str,
|
||||
request: Dict[str, Any],
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Create an agent wallet on a specific blockchain"""
|
||||
try:
|
||||
wallet = await manager.wallet_adapter.create_agent_wallet(
|
||||
agent_id,
|
||||
request['chain_id'],
|
||||
request.get('owner_address', '')
|
||||
)
|
||||
|
||||
return {
|
||||
'wallet_id': wallet.id,
|
||||
'agent_id': agent_id,
|
||||
'chain_id': wallet.chain_id,
|
||||
'chain_address': wallet.chain_address,
|
||||
'wallet_type': wallet.wallet_type,
|
||||
'contract_address': wallet.contract_address,
|
||||
'created_at': wallet.created_at.isoformat()
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
|
||||
@router.get("/identities/{agent_id}/wallets/{chain_id}/balance", response_model=Dict[str, Any])
|
||||
async def get_wallet_balance(
|
||||
agent_id: str,
|
||||
chain_id: int,
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Get wallet balance for an agent on a specific chain"""
|
||||
try:
|
||||
balance = await manager.wallet_adapter.get_wallet_balance(agent_id, chain_id)
|
||||
return {
|
||||
'agent_id': agent_id,
|
||||
'chain_id': chain_id,
|
||||
'balance': str(balance),
|
||||
'timestamp': datetime.utcnow().isoformat()
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/identities/{agent_id}/wallets/{chain_id}/transactions", response_model=Dict[str, Any])
|
||||
async def execute_wallet_transaction(
|
||||
agent_id: str,
|
||||
chain_id: int,
|
||||
request: Dict[str, Any],
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Execute a transaction from agent wallet"""
|
||||
try:
|
||||
from decimal import Decimal
|
||||
|
||||
result = await manager.wallet_adapter.execute_wallet_transaction(
|
||||
agent_id,
|
||||
chain_id,
|
||||
request['to_address'],
|
||||
Decimal(str(request['amount'])),
|
||||
request.get('data')
|
||||
)
|
||||
return result
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
|
||||
@router.get("/identities/{agent_id}/wallets/{chain_id}/transactions", response_model=List[Dict[str, Any]])
|
||||
async def get_wallet_transaction_history(
|
||||
agent_id: str,
|
||||
chain_id: int,
|
||||
limit: int = Query(default=50, ge=1, le=1000),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Get transaction history for agent wallet"""
|
||||
try:
|
||||
history = await manager.wallet_adapter.get_wallet_transaction_history(
|
||||
agent_id,
|
||||
chain_id,
|
||||
limit,
|
||||
offset
|
||||
)
|
||||
return history
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get("/identities/{agent_id}/wallets", response_model=Dict[str, Any])
|
||||
async def get_all_agent_wallets(
|
||||
agent_id: str,
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Get all wallets for an agent across all chains"""
|
||||
try:
|
||||
wallets = await manager.wallet_adapter.get_all_agent_wallets(agent_id)
|
||||
stats = await manager.wallet_adapter.get_wallet_statistics(agent_id)
|
||||
|
||||
return {
|
||||
'agent_id': agent_id,
|
||||
'wallets': [
|
||||
{
|
||||
'id': w.id,
|
||||
'chain_id': w.chain_id,
|
||||
'chain_address': w.chain_address,
|
||||
'wallet_type': w.wallet_type,
|
||||
'contract_address': w.contract_address,
|
||||
'balance': w.balance,
|
||||
'spending_limit': w.spending_limit,
|
||||
'total_spent': w.total_spent,
|
||||
'is_active': w.is_active,
|
||||
'transaction_count': w.transaction_count,
|
||||
'last_transaction': w.last_transaction.isoformat() if w.last_transaction else None,
|
||||
'created_at': w.created_at.isoformat(),
|
||||
'updated_at': w.updated_at.isoformat()
|
||||
}
|
||||
for w in wallets
|
||||
],
|
||||
'statistics': stats
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# Search and Discovery Endpoints
|
||||
|
||||
@router.get("/identities/search", response_model=Dict[str, Any])
|
||||
async def search_agent_identities(
|
||||
query: str = Query(default="", description="Search query"),
|
||||
chains: Optional[List[int]] = Query(default=None, description="Filter by chain IDs"),
|
||||
status: Optional[IdentityStatus] = Query(default=None, description="Filter by status"),
|
||||
verification_level: Optional[VerificationType] = Query(default=None, description="Filter by verification level"),
|
||||
min_reputation: Optional[float] = Query(default=None, ge=0, le=100, description="Minimum reputation score"),
|
||||
limit: int = Query(default=50, ge=1, le=100),
|
||||
offset: int = Query(default=0, ge=0),
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Search agent identities with advanced filters"""
|
||||
try:
|
||||
result = await manager.search_agent_identities(
|
||||
query=query,
|
||||
chains=chains,
|
||||
status=status,
|
||||
verification_level=verification_level,
|
||||
min_reputation=min_reputation,
|
||||
limit=limit,
|
||||
offset=offset
|
||||
)
|
||||
return result
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/identities/{agent_id}/sync-reputation", response_model=Dict[str, Any])
|
||||
async def sync_agent_reputation(
|
||||
agent_id: str,
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Sync agent reputation across all chains"""
|
||||
try:
|
||||
result = await manager.sync_agent_reputation(agent_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# Utility Endpoints
|
||||
|
||||
@router.get("/registry/health", response_model=Dict[str, Any])
|
||||
async def get_registry_health(manager: AgentIdentityManager = Depends(get_identity_manager)):
|
||||
"""Get health status of the identity registry"""
|
||||
try:
|
||||
result = await manager.get_registry_health()
|
||||
return result
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get("/registry/statistics", response_model=Dict[str, Any])
|
||||
async def get_registry_statistics(manager: AgentIdentityManager = Depends(get_identity_manager)):
|
||||
"""Get comprehensive registry statistics"""
|
||||
try:
|
||||
result = await manager.registry.get_registry_statistics()
|
||||
return result
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get("/chains/supported", response_model=List[Dict[str, Any]])
|
||||
async def get_supported_chains(manager: AgentIdentityManager = Depends(get_identity_manager)):
|
||||
"""Get list of supported blockchains"""
|
||||
try:
|
||||
chains = manager.wallet_adapter.get_supported_chains()
|
||||
return chains
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/identities/{agent_id}/export", response_model=Dict[str, Any])
|
||||
async def export_agent_identity(
|
||||
agent_id: str,
|
||||
request: Dict[str, Any] = None,
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Export agent identity data for backup or migration"""
|
||||
try:
|
||||
format_type = (request or {}).get('format', 'json')
|
||||
result = await manager.export_agent_identity(agent_id, format_type)
|
||||
return result
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/identities/import", response_model=Dict[str, Any])
|
||||
async def import_agent_identity(
|
||||
export_data: Dict[str, Any],
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Import agent identity data from backup or migration"""
|
||||
try:
|
||||
result = await manager.import_agent_identity(export_data)
|
||||
return result
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/registry/cleanup-expired", response_model=Dict[str, Any])
|
||||
async def cleanup_expired_verifications(manager: AgentIdentityManager = Depends(get_identity_manager)):
|
||||
"""Clean up expired verification records"""
|
||||
try:
|
||||
cleaned_count = await manager.registry.cleanup_expired_verifications()
|
||||
return {
|
||||
'cleaned_verifications': cleaned_count,
|
||||
'timestamp': datetime.utcnow().isoformat()
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/identities/batch-verify", response_model=List[Dict[str, Any]])
|
||||
async def batch_verify_identities(
|
||||
verifications: List[Dict[str, Any]],
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Batch verify multiple identities"""
|
||||
try:
|
||||
results = await manager.registry.batch_verify_identities(verifications)
|
||||
return results
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get("/identities/{agent_id}/resolve/{chain_id}", response_model=Dict[str, Any])
|
||||
async def resolve_agent_identity(
|
||||
agent_id: str,
|
||||
chain_id: int,
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Resolve agent identity to chain-specific address"""
|
||||
try:
|
||||
address = await manager.registry.resolve_agent_identity(agent_id, chain_id)
|
||||
if not address:
|
||||
raise HTTPException(status_code=404, detail='Identity mapping not found')
|
||||
|
||||
return {
|
||||
'agent_id': agent_id,
|
||||
'chain_id': chain_id,
|
||||
'address': address,
|
||||
'resolved': True
|
||||
}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get("/address/{chain_address}/resolve/{chain_id}", response_model=Dict[str, Any])
|
||||
async def resolve_address_to_agent(
|
||||
chain_address: str,
|
||||
chain_id: int,
|
||||
manager: AgentIdentityManager = Depends(get_identity_manager)
|
||||
):
|
||||
"""Resolve chain address back to agent ID"""
|
||||
try:
|
||||
agent_id = await manager.registry.resolve_agent_identity_by_address(chain_address, chain_id)
|
||||
if not agent_id:
|
||||
raise HTTPException(status_code=404, detail='Address mapping not found')
|
||||
|
||||
return {
|
||||
'chain_address': chain_address,
|
||||
'chain_id': chain_id,
|
||||
'agent_id': agent_id,
|
||||
'resolved': True
|
||||
}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
762
apps/coordinator-api/src/app/routers/dynamic_pricing.py
Normal file
762
apps/coordinator-api/src/app/routers/dynamic_pricing.py
Normal file
@@ -0,0 +1,762 @@
|
||||
"""
|
||||
Dynamic Pricing API Router
|
||||
Provides RESTful endpoints for dynamic pricing management
|
||||
"""
|
||||
|
||||
from typing import Dict, List, Any, Optional
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Query, Depends
|
||||
from fastapi import status as http_status
|
||||
from pydantic import BaseModel, Field
|
||||
from sqlmodel import select, func
|
||||
|
||||
from ..storage import SessionDep
|
||||
from ..services.dynamic_pricing_engine import (
|
||||
DynamicPricingEngine,
|
||||
PricingStrategy,
|
||||
ResourceType,
|
||||
PriceConstraints,
|
||||
PriceTrend
|
||||
)
|
||||
from ..services.market_data_collector import MarketDataCollector
|
||||
from ..domain.pricing_strategies import StrategyLibrary, PricingStrategyConfig
|
||||
from ..schemas.pricing import (
|
||||
DynamicPriceRequest,
|
||||
DynamicPriceResponse,
|
||||
PriceForecast,
|
||||
PricingStrategyRequest,
|
||||
PricingStrategyResponse,
|
||||
MarketAnalysisResponse,
|
||||
PricingRecommendation,
|
||||
PriceHistoryResponse,
|
||||
BulkPricingUpdateRequest,
|
||||
BulkPricingUpdateResponse
|
||||
)
|
||||
|
||||
router = APIRouter(prefix="/v1/pricing", tags=["dynamic-pricing"])
|
||||
|
||||
# Global instances (in production, these would be dependency injected)
|
||||
pricing_engine = None
|
||||
market_collector = None
|
||||
|
||||
|
||||
async def get_pricing_engine() -> DynamicPricingEngine:
|
||||
"""Get pricing engine instance"""
|
||||
global pricing_engine
|
||||
if pricing_engine is None:
|
||||
pricing_engine = DynamicPricingEngine({
|
||||
"min_price": 0.001,
|
||||
"max_price": 1000.0,
|
||||
"update_interval": 300,
|
||||
"forecast_horizon": 72
|
||||
})
|
||||
await pricing_engine.initialize()
|
||||
return pricing_engine
|
||||
|
||||
|
||||
async def get_market_collector() -> MarketDataCollector:
|
||||
"""Get market data collector instance"""
|
||||
global market_collector
|
||||
if market_collector is None:
|
||||
market_collector = MarketDataCollector({
|
||||
"websocket_port": 8765
|
||||
})
|
||||
await market_collector.initialize()
|
||||
return market_collector
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Core Pricing Endpoints
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@router.get("/dynamic/{resource_type}/{resource_id}", response_model=DynamicPriceResponse)
|
||||
async def get_dynamic_price(
|
||||
resource_type: str,
|
||||
resource_id: str,
|
||||
strategy: Optional[str] = Query(default=None),
|
||||
region: str = Query(default="global"),
|
||||
engine: DynamicPricingEngine = Depends(get_pricing_engine)
|
||||
) -> DynamicPriceResponse:
|
||||
"""Get current dynamic price for a resource"""
|
||||
|
||||
try:
|
||||
# Validate resource type
|
||||
try:
|
||||
resource_enum = ResourceType(resource_type.lower())
|
||||
except ValueError:
|
||||
raise HTTPException(
|
||||
status_code=http_status.HTTP_400_BAD_REQUEST,
|
||||
detail=f"Invalid resource type: {resource_type}"
|
||||
)
|
||||
|
||||
# Get base price (in production, this would come from database)
|
||||
base_price = 0.05 # Default base price
|
||||
|
||||
# Parse strategy if provided
|
||||
strategy_enum = None
|
||||
if strategy:
|
||||
try:
|
||||
strategy_enum = PricingStrategy(strategy.lower())
|
||||
except ValueError:
|
||||
raise HTTPException(
|
||||
status_code=http_status.HTTP_400_BAD_REQUEST,
|
||||
detail=f"Invalid strategy: {strategy}"
|
||||
)
|
||||
|
||||
# Calculate dynamic price
|
||||
result = await engine.calculate_dynamic_price(
|
||||
resource_id=resource_id,
|
||||
resource_type=resource_enum,
|
||||
base_price=base_price,
|
||||
strategy=strategy_enum,
|
||||
region=region
|
||||
)
|
||||
|
||||
return DynamicPriceResponse(
|
||||
resource_id=result.resource_id,
|
||||
resource_type=result.resource_type.value,
|
||||
current_price=result.current_price,
|
||||
recommended_price=result.recommended_price,
|
||||
price_trend=result.price_trend.value,
|
||||
confidence_score=result.confidence_score,
|
||||
factors_exposed=result.factors_exposed,
|
||||
reasoning=result.reasoning,
|
||||
next_update=result.next_update,
|
||||
strategy_used=result.strategy_used.value
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to calculate dynamic price: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get("/forecast/{resource_type}/{resource_id}", response_model=PriceForecast)
|
||||
async def get_price_forecast(
|
||||
resource_type: str,
|
||||
resource_id: str,
|
||||
hours: int = Query(default=24, ge=1, le=168), # 1 hour to 1 week
|
||||
engine: DynamicPricingEngine = Depends(get_pricing_engine)
|
||||
) -> PriceForecast:
|
||||
"""Get pricing forecast for next N hours"""
|
||||
|
||||
try:
|
||||
# Validate resource type
|
||||
try:
|
||||
ResourceType(resource_type.lower())
|
||||
except ValueError:
|
||||
raise HTTPException(
|
||||
status_code=http_status.HTTP_400_BAD_REQUEST,
|
||||
detail=f"Invalid resource type: {resource_type}"
|
||||
)
|
||||
|
||||
# Get forecast
|
||||
forecast_points = await engine.get_price_forecast(resource_id, hours)
|
||||
|
||||
return PriceForecast(
|
||||
resource_id=resource_id,
|
||||
resource_type=resource_type,
|
||||
forecast_hours=hours,
|
||||
time_points=[
|
||||
{
|
||||
"timestamp": point.timestamp.isoformat(),
|
||||
"price": point.price,
|
||||
"demand_level": point.demand_level,
|
||||
"supply_level": point.supply_level,
|
||||
"confidence": point.confidence,
|
||||
"strategy_used": point.strategy_used
|
||||
}
|
||||
for point in forecast_points
|
||||
],
|
||||
accuracy_score=sum(point.confidence for point in forecast_points) / len(forecast_points) if forecast_points else 0.0,
|
||||
generated_at=datetime.utcnow().isoformat()
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to generate price forecast: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Strategy Management Endpoints
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@router.post("/strategy/{provider_id}", response_model=PricingStrategyResponse)
|
||||
async def set_pricing_strategy(
|
||||
provider_id: str,
|
||||
request: PricingStrategyRequest,
|
||||
engine: DynamicPricingEngine = Depends(get_pricing_engine)
|
||||
) -> PricingStrategyResponse:
|
||||
"""Set pricing strategy for a provider"""
|
||||
|
||||
try:
|
||||
# Validate strategy
|
||||
try:
|
||||
strategy_enum = PricingStrategy(request.strategy.lower())
|
||||
except ValueError:
|
||||
raise HTTPException(
|
||||
status_code=http_status.HTTP_400_BAD_REQUEST,
|
||||
detail=f"Invalid strategy: {request.strategy}"
|
||||
)
|
||||
|
||||
# Parse constraints
|
||||
constraints = None
|
||||
if request.constraints:
|
||||
constraints = PriceConstraints(
|
||||
min_price=request.constraints.get("min_price"),
|
||||
max_price=request.constraints.get("max_price"),
|
||||
max_change_percent=request.constraints.get("max_change_percent", 0.5),
|
||||
min_change_interval=request.constraints.get("min_change_interval", 300),
|
||||
strategy_lock_period=request.constraints.get("strategy_lock_period", 3600)
|
||||
)
|
||||
|
||||
# Set strategy
|
||||
success = await engine.set_provider_strategy(provider_id, strategy_enum, constraints)
|
||||
|
||||
if not success:
|
||||
raise HTTPException(
|
||||
status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail="Failed to set pricing strategy"
|
||||
)
|
||||
|
||||
return PricingStrategyResponse(
|
||||
provider_id=provider_id,
|
||||
strategy=request.strategy,
|
||||
constraints=request.constraints,
|
||||
set_at=datetime.utcnow().isoformat(),
|
||||
status="active"
|
||||
)
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to set pricing strategy: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get("/strategy/{provider_id}", response_model=PricingStrategyResponse)
|
||||
async def get_pricing_strategy(
|
||||
provider_id: str,
|
||||
engine: DynamicPricingEngine = Depends(get_pricing_engine)
|
||||
) -> PricingStrategyResponse:
|
||||
"""Get current pricing strategy for a provider"""
|
||||
|
||||
try:
|
||||
# Get strategy from engine
|
||||
if provider_id not in engine.provider_strategies:
|
||||
raise HTTPException(
|
||||
status_code=http_status.HTTP_404_NOT_FOUND,
|
||||
detail=f"No strategy found for provider {provider_id}"
|
||||
)
|
||||
|
||||
strategy = engine.provider_strategies[provider_id]
|
||||
constraints = engine.price_constraints.get(provider_id)
|
||||
|
||||
constraints_dict = None
|
||||
if constraints:
|
||||
constraints_dict = {
|
||||
"min_price": constraints.min_price,
|
||||
"max_price": constraints.max_price,
|
||||
"max_change_percent": constraints.max_change_percent,
|
||||
"min_change_interval": constraints.min_change_interval,
|
||||
"strategy_lock_period": constraints.strategy_lock_period
|
||||
}
|
||||
|
||||
return PricingStrategyResponse(
|
||||
provider_id=provider_id,
|
||||
strategy=strategy.value,
|
||||
constraints=constraints_dict,
|
||||
set_at=datetime.utcnow().isoformat(),
|
||||
status="active"
|
||||
)
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to get pricing strategy: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@router.get("/strategies/available", response_model=List[Dict[str, Any]])
|
||||
async def get_available_strategies() -> List[Dict[str, Any]]:
|
||||
"""Get list of available pricing strategies"""
|
||||
|
||||
try:
|
||||
strategies = []
|
||||
|
||||
for strategy_type, config in StrategyLibrary.get_all_strategies().items():
|
||||
strategies.append({
|
||||
"strategy": strategy_type.value,
|
||||
"name": config.name,
|
||||
"description": config.description,
|
||||
"risk_tolerance": config.risk_tolerance.value,
|
||||
"priority": config.priority.value,
|
||||
"parameters": {
|
||||
"base_multiplier": config.parameters.base_multiplier,
|
||||
"demand_sensitivity": config.parameters.demand_sensitivity,
|
||||
"competition_sensitivity": config.parameters.competition_sensitivity,
|
||||
"max_price_change_percent": config.parameters.max_price_change_percent
|
||||
}
|
||||
})
|
||||
|
||||
return strategies
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to get available strategies: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Market Analysis Endpoints
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@router.get("/market-analysis", response_model=MarketAnalysisResponse)
|
||||
async def get_market_analysis(
|
||||
region: str = Query(default="global"),
|
||||
resource_type: str = Query(default="gpu"),
|
||||
collector: MarketDataCollector = Depends(get_market_collector)
|
||||
) -> MarketAnalysisResponse:
|
||||
"""Get comprehensive market pricing analysis"""
|
||||
|
||||
try:
|
||||
# Validate resource type
|
||||
try:
|
||||
ResourceType(resource_type.lower())
|
||||
except ValueError:
|
||||
raise HTTPException(
|
||||
status_code=http_status.HTTP_400_BAD_REQUEST,
|
||||
detail=f"Invalid resource type: {resource_type}"
|
||||
)
|
||||
|
||||
# Get aggregated market data
|
||||
market_data = await collector.get_aggregated_data(resource_type, region)
|
||||
|
||||
if not market_data:
|
||||
raise HTTPException(
|
||||
status_code=http_status.HTTP_404_NOT_FOUND,
|
||||
detail=f"No market data available for {resource_type} in {region}"
|
||||
)
|
||||
|
||||
# Get recent data for trend analysis
|
||||
recent_gpu_data = await collector.get_recent_data("gpu_metrics", 60)
|
||||
recent_booking_data = await collector.get_recent_data("booking_data", 60)
|
||||
|
||||
# Calculate trends
|
||||
demand_trend = "stable"
|
||||
supply_trend = "stable"
|
||||
price_trend = "stable"
|
||||
|
||||
if len(recent_booking_data) > 1:
|
||||
recent_demand = [point.metadata.get("demand_level", 0.5) for point in recent_booking_data[-10:]]
|
||||
if recent_demand:
|
||||
avg_recent = sum(recent_demand[-5:]) / 5
|
||||
avg_older = sum(recent_demand[:5]) / 5
|
||||
change = (avg_recent - avg_older) / avg_older if avg_older > 0 else 0
|
||||
|
||||
if change > 0.1:
|
||||
demand_trend = "increasing"
|
||||
elif change < -0.1:
|
||||
demand_trend = "decreasing"
|
||||
|
||||
# Generate recommendations
|
||||
recommendations = []
|
||||
|
||||
if market_data.demand_level > 0.8:
|
||||
recommendations.append("High demand detected - consider premium pricing")
|
||||
|
||||
if market_data.supply_level < 0.3:
|
||||
recommendations.append("Low supply detected - prices may increase")
|
||||
|
||||
if market_data.price_volatility > 0.2:
|
||||
recommendations.append("High price volatility - consider stable pricing strategy")
|
||||
|
||||
if market_data.utilization_rate > 0.9:
|
||||
recommendations.append("High utilization - capacity constraints may affect pricing")
|
||||
|
||||
return MarketAnalysisResponse(
|
||||
region=region,
|
||||
resource_type=resource_type,
|
||||
current_conditions={
|
||||
"demand_level": market_data.demand_level,
|
||||
"supply_level": market_data.supply_level,
|
||||
"average_price": market_data.average_price,
|
||||
"price_volatility": market_data.price_volatility,
|
||||
"utilization_rate": market_data.utilization_rate,
|
||||
"market_sentiment": market_data.market_sentiment
|
||||
},
|
||||
trends={
|
||||
"demand_trend": demand_trend,
|
||||
"supply_trend": supply_trend,
|
||||
"price_trend": price_trend
|
||||
},
|
||||
competitor_analysis={
|
||||
"average_competitor_price": sum(market_data.competitor_prices) / len(market_data.competitor_prices) if market_data.competitor_prices else 0,
|
||||
"price_range": {
|
||||
"min": min(market_data.competitor_prices) if market_data.competitor_prices else 0,
|
||||
"max": max(market_data.competitor_prices) if market_data.competitor_prices else 0
|
||||
},
|
||||
"competitor_count": len(market_data.competitor_prices)
|
||||
},
|
||||
recommendations=recommendations,
|
||||
confidence_score=market_data.confidence_score,
|
||||
analysis_timestamp=market_data.timestamp.isoformat()
|
||||
)
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to get market analysis: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Recommendations Endpoints
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@router.get("/recommendations/{provider_id}", response_model=List[PricingRecommendation])
|
||||
async def get_pricing_recommendations(
|
||||
provider_id: str,
|
||||
resource_type: str = Query(default="gpu"),
|
||||
region: str = Query(default="global"),
|
||||
engine: DynamicPricingEngine = Depends(get_pricing_engine),
|
||||
collector: MarketDataCollector = Depends(get_market_collector)
|
||||
) -> List[PricingRecommendation]:
|
||||
"""Get pricing optimization recommendations for a provider"""
|
||||
|
||||
try:
|
||||
# Validate resource type
|
||||
try:
|
||||
ResourceType(resource_type.lower())
|
||||
except ValueError:
|
||||
raise HTTPException(
|
||||
status_code=http_status.HTTP_400_BAD_REQUEST,
|
||||
detail=f"Invalid resource type: {resource_type}"
|
||||
)
|
||||
|
||||
recommendations = []
|
||||
|
||||
# Get market data
|
||||
market_data = await collector.get_aggregated_data(resource_type, region)
|
||||
|
||||
if not market_data:
|
||||
return []
|
||||
|
||||
# Get provider's current strategy
|
||||
current_strategy = engine.provider_strategies.get(provider_id, PricingStrategy.MARKET_BALANCE)
|
||||
|
||||
# Generate recommendations based on market conditions
|
||||
if market_data.demand_level > 0.8 and market_data.supply_level < 0.4:
|
||||
recommendations.append(PricingRecommendation(
|
||||
type="strategy_change",
|
||||
title="Switch to Profit Maximization",
|
||||
description="High demand and low supply conditions favor profit maximization strategy",
|
||||
impact="high",
|
||||
confidence=0.85,
|
||||
action="Set strategy to profit_maximization",
|
||||
expected_outcome="+15-25% revenue increase"
|
||||
))
|
||||
|
||||
if market_data.price_volatility > 0.25:
|
||||
recommendations.append(PricingRecommendation(
|
||||
type="risk_management",
|
||||
title="Enable Price Stability Mode",
|
||||
description="High volatility detected - enable stability constraints",
|
||||
impact="medium",
|
||||
confidence=0.9,
|
||||
action="Set max_price_change_percent to 0.15",
|
||||
expected_outcome="Reduced price volatility by 60%"
|
||||
))
|
||||
|
||||
if market_data.utilization_rate < 0.5:
|
||||
recommendations.append(PricingRecommendation(
|
||||
type="competitive_response",
|
||||
title="Aggressive Competitive Pricing",
|
||||
description="Low utilization suggests need for competitive pricing",
|
||||
impact="high",
|
||||
confidence=0.75,
|
||||
action="Set strategy to competitive_response",
|
||||
expected_outcome="+10-20% utilization increase"
|
||||
))
|
||||
|
||||
# Strategy-specific recommendations
|
||||
if current_strategy == PricingStrategy.MARKET_BALANCE:
|
||||
recommendations.append(PricingRecommendation(
|
||||
type="optimization",
|
||||
title="Consider Dynamic Strategy",
|
||||
description="Market conditions favor more dynamic pricing approach",
|
||||
impact="medium",
|
||||
confidence=0.7,
|
||||
action="Evaluate demand_elasticity or competitive_response strategies",
|
||||
expected_outcome="Improved market responsiveness"
|
||||
))
|
||||
|
||||
# Performance-based recommendations
|
||||
if provider_id in engine.pricing_history:
|
||||
history = engine.pricing_history[provider_id]
|
||||
if len(history) > 10:
|
||||
recent_prices = [point.price for point in history[-10:]]
|
||||
price_variance = sum((p - sum(recent_prices)/len(recent_prices))**2 for p in recent_prices) / len(recent_prices)
|
||||
|
||||
if price_variance > (sum(recent_prices)/len(recent_prices) * 0.01):
|
||||
recommendations.append(PricingRecommendation(
|
||||
type="stability",
|
||||
title="Reduce Price Variance",
|
||||
description="High price variance detected - consider stability improvements",
|
||||
impact="medium",
|
||||
confidence=0.8,
|
||||
action="Enable confidence_threshold of 0.8",
|
||||
expected_outcome="More stable pricing patterns"
|
||||
))
|
||||
|
||||
return recommendations
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to get pricing recommendations: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# History and Analytics Endpoints
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@router.get("/history/{resource_id}", response_model=PriceHistoryResponse)
|
||||
async def get_price_history(
|
||||
resource_id: str,
|
||||
period: str = Query(default="7d", regex="^(1d|7d|30d|90d)$"),
|
||||
engine: DynamicPricingEngine = Depends(get_pricing_engine)
|
||||
) -> PriceHistoryResponse:
|
||||
"""Get historical pricing data for a resource"""
|
||||
|
||||
try:
|
||||
# Parse period
|
||||
period_days = {"1d": 1, "7d": 7, "30d": 30, "90d": 90}
|
||||
days = period_days.get(period, 7)
|
||||
|
||||
# Get pricing history
|
||||
if resource_id not in engine.pricing_history:
|
||||
return PriceHistoryResponse(
|
||||
resource_id=resource_id,
|
||||
period=period,
|
||||
data_points=[],
|
||||
statistics={
|
||||
"average_price": 0,
|
||||
"min_price": 0,
|
||||
"max_price": 0,
|
||||
"price_volatility": 0,
|
||||
"total_changes": 0
|
||||
}
|
||||
)
|
||||
|
||||
# Filter history by period
|
||||
cutoff_time = datetime.utcnow() - timedelta(days=days)
|
||||
filtered_history = [
|
||||
point for point in engine.pricing_history[resource_id]
|
||||
if point.timestamp >= cutoff_time
|
||||
]
|
||||
|
||||
# Calculate statistics
|
||||
if filtered_history:
|
||||
prices = [point.price for point in filtered_history]
|
||||
average_price = sum(prices) / len(prices)
|
||||
min_price = min(prices)
|
||||
max_price = max(prices)
|
||||
|
||||
# Calculate volatility
|
||||
variance = sum((p - average_price) ** 2 for p in prices) / len(prices)
|
||||
price_volatility = (variance ** 0.5) / average_price if average_price > 0 else 0
|
||||
|
||||
# Count price changes
|
||||
total_changes = 0
|
||||
for i in range(1, len(filtered_history)):
|
||||
if abs(filtered_history[i].price - filtered_history[i-1].price) > 0.001:
|
||||
total_changes += 1
|
||||
else:
|
||||
average_price = min_price = max_price = price_volatility = total_changes = 0
|
||||
|
||||
return PriceHistoryResponse(
|
||||
resource_id=resource_id,
|
||||
period=period,
|
||||
data_points=[
|
||||
{
|
||||
"timestamp": point.timestamp.isoformat(),
|
||||
"price": point.price,
|
||||
"demand_level": point.demand_level,
|
||||
"supply_level": point.supply_level,
|
||||
"confidence": point.confidence,
|
||||
"strategy_used": point.strategy_used
|
||||
}
|
||||
for point in filtered_history
|
||||
],
|
||||
statistics={
|
||||
"average_price": average_price,
|
||||
"min_price": min_price,
|
||||
"max_price": max_price,
|
||||
"price_volatility": price_volatility,
|
||||
"total_changes": total_changes
|
||||
}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to get price history: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Bulk Operations Endpoints
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@router.post("/bulk-update", response_model=BulkPricingUpdateResponse)
|
||||
async def bulk_pricing_update(
|
||||
request: BulkPricingUpdateRequest,
|
||||
engine: DynamicPricingEngine = Depends(get_pricing_engine)
|
||||
) -> BulkPricingUpdateResponse:
|
||||
"""Bulk update pricing for multiple resources"""
|
||||
|
||||
try:
|
||||
results = []
|
||||
success_count = 0
|
||||
error_count = 0
|
||||
|
||||
for update in request.updates:
|
||||
try:
|
||||
# Validate strategy
|
||||
strategy_enum = PricingStrategy(update.strategy.lower())
|
||||
|
||||
# Parse constraints
|
||||
constraints = None
|
||||
if update.constraints:
|
||||
constraints = PriceConstraints(
|
||||
min_price=update.constraints.get("min_price"),
|
||||
max_price=update.constraints.get("max_price"),
|
||||
max_change_percent=update.constraints.get("max_change_percent", 0.5),
|
||||
min_change_interval=update.constraints.get("min_change_interval", 300),
|
||||
strategy_lock_period=update.constraints.get("strategy_lock_period", 3600)
|
||||
)
|
||||
|
||||
# Set strategy
|
||||
success = await engine.set_provider_strategy(update.provider_id, strategy_enum, constraints)
|
||||
|
||||
if success:
|
||||
success_count += 1
|
||||
results.append({
|
||||
"provider_id": update.provider_id,
|
||||
"status": "success",
|
||||
"message": "Strategy updated successfully"
|
||||
})
|
||||
else:
|
||||
error_count += 1
|
||||
results.append({
|
||||
"provider_id": update.provider_id,
|
||||
"status": "error",
|
||||
"message": "Failed to update strategy"
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
error_count += 1
|
||||
results.append({
|
||||
"provider_id": update.provider_id,
|
||||
"status": "error",
|
||||
"message": str(e)
|
||||
})
|
||||
|
||||
return BulkPricingUpdateResponse(
|
||||
total_updates=len(request.updates),
|
||||
success_count=success_count,
|
||||
error_count=error_count,
|
||||
results=results,
|
||||
processed_at=datetime.utcnow().isoformat()
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to process bulk update: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Health Check Endpoint
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@router.get("/health")
|
||||
async def pricing_health_check(
|
||||
engine: DynamicPricingEngine = Depends(get_pricing_engine),
|
||||
collector: MarketDataCollector = Depends(get_market_collector)
|
||||
) -> Dict[str, Any]:
|
||||
"""Health check for pricing services"""
|
||||
|
||||
try:
|
||||
# Check engine status
|
||||
engine_status = "healthy"
|
||||
engine_errors = []
|
||||
|
||||
if not engine.pricing_history:
|
||||
engine_errors.append("No pricing history available")
|
||||
|
||||
if not engine.provider_strategies:
|
||||
engine_errors.append("No provider strategies configured")
|
||||
|
||||
if engine_errors:
|
||||
engine_status = "degraded"
|
||||
|
||||
# Check collector status
|
||||
collector_status = "healthy"
|
||||
collector_errors = []
|
||||
|
||||
if not collector.aggregated_data:
|
||||
collector_errors.append("No aggregated market data available")
|
||||
|
||||
if len(collector.raw_data) < 10:
|
||||
collector_errors.append("Insufficient raw market data")
|
||||
|
||||
if collector_errors:
|
||||
collector_status = "degraded"
|
||||
|
||||
# Overall status
|
||||
overall_status = "healthy"
|
||||
if engine_status == "degraded" or collector_status == "degraded":
|
||||
overall_status = "degraded"
|
||||
|
||||
return {
|
||||
"status": overall_status,
|
||||
"timestamp": datetime.utcnow().isoformat(),
|
||||
"services": {
|
||||
"pricing_engine": {
|
||||
"status": engine_status,
|
||||
"errors": engine_errors,
|
||||
"providers_configured": len(engine.provider_strategies),
|
||||
"resources_tracked": len(engine.pricing_history)
|
||||
},
|
||||
"market_collector": {
|
||||
"status": collector_status,
|
||||
"errors": collector_errors,
|
||||
"data_points_collected": len(collector.raw_data),
|
||||
"aggregated_regions": len(collector.aggregated_data)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
return {
|
||||
"status": "unhealthy",
|
||||
"timestamp": datetime.utcnow().isoformat(),
|
||||
"error": str(e)
|
||||
}
|
||||
@@ -4,17 +4,49 @@ GPU marketplace endpoints backed by persistent SQLModel tables.
|
||||
|
||||
from typing import Any, Dict, List, Optional
|
||||
from datetime import datetime, timedelta
|
||||
import statistics
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Query
|
||||
from fastapi import APIRouter, HTTPException, Query, Depends
|
||||
from fastapi import status as http_status
|
||||
from pydantic import BaseModel, Field
|
||||
from sqlmodel import select, func, col
|
||||
|
||||
from ..storage import SessionDep
|
||||
from ..domain.gpu_marketplace import GPURegistry, GPUBooking, GPUReview
|
||||
from ..services.dynamic_pricing_engine import DynamicPricingEngine, PricingStrategy, ResourceType
|
||||
from ..services.market_data_collector import MarketDataCollector
|
||||
|
||||
router = APIRouter(tags=["marketplace-gpu"])
|
||||
|
||||
# Global instances (in production, these would be dependency injected)
|
||||
pricing_engine = None
|
||||
market_collector = None
|
||||
|
||||
|
||||
async def get_pricing_engine() -> DynamicPricingEngine:
|
||||
"""Get pricing engine instance"""
|
||||
global pricing_engine
|
||||
if pricing_engine is None:
|
||||
pricing_engine = DynamicPricingEngine({
|
||||
"min_price": 0.001,
|
||||
"max_price": 1000.0,
|
||||
"update_interval": 300,
|
||||
"forecast_horizon": 72
|
||||
})
|
||||
await pricing_engine.initialize()
|
||||
return pricing_engine
|
||||
|
||||
|
||||
async def get_market_collector() -> MarketDataCollector:
|
||||
"""Get market data collector instance"""
|
||||
global market_collector
|
||||
if market_collector is None:
|
||||
market_collector = MarketDataCollector({
|
||||
"websocket_port": 8765
|
||||
})
|
||||
await market_collector.initialize()
|
||||
return market_collector
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Request schemas
|
||||
@@ -79,27 +111,55 @@ def _get_gpu_or_404(session, gpu_id: str) -> GPURegistry:
|
||||
async def register_gpu(
|
||||
request: Dict[str, Any],
|
||||
session: SessionDep,
|
||||
engine: DynamicPricingEngine = Depends(get_pricing_engine)
|
||||
) -> Dict[str, Any]:
|
||||
"""Register a GPU in the marketplace."""
|
||||
"""Register a GPU in the marketplace with dynamic pricing."""
|
||||
gpu_specs = request.get("gpu", {})
|
||||
|
||||
# Get initial price from request or calculate dynamically
|
||||
base_price = gpu_specs.get("price_per_hour", 0.05)
|
||||
|
||||
# Calculate dynamic price for new GPU
|
||||
try:
|
||||
dynamic_result = await engine.calculate_dynamic_price(
|
||||
resource_id=f"new_gpu_{gpu_specs.get('miner_id', 'unknown')}",
|
||||
resource_type=ResourceType.GPU,
|
||||
base_price=base_price,
|
||||
strategy=PricingStrategy.MARKET_BALANCE,
|
||||
region=gpu_specs.get("region", "global")
|
||||
)
|
||||
# Use dynamic price for initial listing
|
||||
initial_price = dynamic_result.recommended_price
|
||||
except Exception:
|
||||
# Fallback to base price if dynamic pricing fails
|
||||
initial_price = base_price
|
||||
|
||||
gpu = GPURegistry(
|
||||
miner_id=gpu_specs.get("miner_id", ""),
|
||||
model=gpu_specs.get("name", "Unknown GPU"),
|
||||
memory_gb=gpu_specs.get("memory", 0),
|
||||
cuda_version=gpu_specs.get("cuda_version", "Unknown"),
|
||||
region=gpu_specs.get("region", "unknown"),
|
||||
price_per_hour=gpu_specs.get("price_per_hour", 0.0),
|
||||
price_per_hour=initial_price,
|
||||
capabilities=gpu_specs.get("capabilities", []),
|
||||
)
|
||||
session.add(gpu)
|
||||
session.commit()
|
||||
session.refresh(gpu)
|
||||
|
||||
# Set up pricing strategy for this GPU provider
|
||||
await engine.set_provider_strategy(
|
||||
provider_id=gpu.miner_id,
|
||||
strategy=PricingStrategy.MARKET_BALANCE
|
||||
)
|
||||
|
||||
return {
|
||||
"gpu_id": gpu.id,
|
||||
"status": "registered",
|
||||
"message": f"GPU {gpu.model} registered successfully",
|
||||
"base_price": base_price,
|
||||
"dynamic_price": initial_price,
|
||||
"pricing_strategy": "market_balance"
|
||||
}
|
||||
|
||||
|
||||
@@ -154,8 +214,13 @@ async def get_gpu_details(gpu_id: str, session: SessionDep) -> Dict[str, Any]:
|
||||
|
||||
|
||||
@router.post("/marketplace/gpu/{gpu_id}/book", status_code=http_status.HTTP_201_CREATED)
|
||||
async def book_gpu(gpu_id: str, request: GPUBookRequest, session: SessionDep) -> Dict[str, Any]:
|
||||
"""Book a GPU."""
|
||||
async def book_gpu(
|
||||
gpu_id: str,
|
||||
request: GPUBookRequest,
|
||||
session: SessionDep,
|
||||
engine: DynamicPricingEngine = Depends(get_pricing_engine)
|
||||
) -> Dict[str, Any]:
|
||||
"""Book a GPU with dynamic pricing."""
|
||||
gpu = _get_gpu_or_404(session, gpu_id)
|
||||
|
||||
if gpu.status != "available":
|
||||
@@ -166,7 +231,23 @@ async def book_gpu(gpu_id: str, request: GPUBookRequest, session: SessionDep) ->
|
||||
|
||||
start_time = datetime.utcnow()
|
||||
end_time = start_time + timedelta(hours=request.duration_hours)
|
||||
total_cost = request.duration_hours * gpu.price_per_hour
|
||||
|
||||
# Calculate dynamic price at booking time
|
||||
try:
|
||||
dynamic_result = await engine.calculate_dynamic_price(
|
||||
resource_id=gpu_id,
|
||||
resource_type=ResourceType.GPU,
|
||||
base_price=gpu.price_per_hour,
|
||||
strategy=PricingStrategy.MARKET_BALANCE,
|
||||
region=gpu.region
|
||||
)
|
||||
# Use dynamic price for this booking
|
||||
current_price = dynamic_result.recommended_price
|
||||
except Exception:
|
||||
# Fallback to stored price if dynamic pricing fails
|
||||
current_price = gpu.price_per_hour
|
||||
|
||||
total_cost = request.duration_hours * current_price
|
||||
|
||||
booking = GPUBooking(
|
||||
gpu_id=gpu_id,
|
||||
@@ -186,8 +267,13 @@ async def book_gpu(gpu_id: str, request: GPUBookRequest, session: SessionDep) ->
|
||||
"gpu_id": gpu_id,
|
||||
"status": "booked",
|
||||
"total_cost": booking.total_cost,
|
||||
"base_price": gpu.price_per_hour,
|
||||
"dynamic_price": current_price,
|
||||
"price_per_hour": current_price,
|
||||
"start_time": booking.start_time.isoformat() + "Z",
|
||||
"end_time": booking.end_time.isoformat() + "Z",
|
||||
"pricing_factors": dynamic_result.factors_exposed if 'dynamic_result' in locals() else {},
|
||||
"confidence_score": dynamic_result.confidence_score if 'dynamic_result' in locals() else 0.8
|
||||
}
|
||||
|
||||
|
||||
@@ -324,8 +410,13 @@ async def list_orders(
|
||||
|
||||
|
||||
@router.get("/marketplace/pricing/{model}")
|
||||
async def get_pricing(model: str, session: SessionDep) -> Dict[str, Any]:
|
||||
"""Get pricing information for a model."""
|
||||
async def get_pricing(
|
||||
model: str,
|
||||
session: SessionDep,
|
||||
engine: DynamicPricingEngine = Depends(get_pricing_engine),
|
||||
collector: MarketDataCollector = Depends(get_market_collector)
|
||||
) -> Dict[str, Any]:
|
||||
"""Get enhanced pricing information for a model with dynamic pricing."""
|
||||
# SQLite JSON doesn't support array contains, so fetch all and filter in Python
|
||||
all_gpus = session.exec(select(GPURegistry)).all()
|
||||
compatible = [
|
||||
@@ -339,15 +430,97 @@ async def get_pricing(model: str, session: SessionDep) -> Dict[str, Any]:
|
||||
detail=f"No GPUs found for model {model}",
|
||||
)
|
||||
|
||||
prices = [g.price_per_hour for g in compatible]
|
||||
# Get static pricing information
|
||||
static_prices = [g.price_per_hour for g in compatible]
|
||||
cheapest = min(compatible, key=lambda g: g.price_per_hour)
|
||||
|
||||
# Calculate dynamic prices for compatible GPUs
|
||||
dynamic_prices = []
|
||||
for gpu in compatible:
|
||||
try:
|
||||
dynamic_result = await engine.calculate_dynamic_price(
|
||||
resource_id=gpu.id,
|
||||
resource_type=ResourceType.GPU,
|
||||
base_price=gpu.price_per_hour,
|
||||
strategy=PricingStrategy.MARKET_BALANCE,
|
||||
region=gpu.region
|
||||
)
|
||||
dynamic_prices.append({
|
||||
"gpu_id": gpu.id,
|
||||
"static_price": gpu.price_per_hour,
|
||||
"dynamic_price": dynamic_result.recommended_price,
|
||||
"price_change": dynamic_result.recommended_price - gpu.price_per_hour,
|
||||
"price_change_percent": ((dynamic_result.recommended_price - gpu.price_per_hour) / gpu.price_per_hour) * 100,
|
||||
"confidence": dynamic_result.confidence_score,
|
||||
"trend": dynamic_result.price_trend.value,
|
||||
"reasoning": dynamic_result.reasoning
|
||||
})
|
||||
except Exception as e:
|
||||
# Fallback to static price if dynamic pricing fails
|
||||
dynamic_prices.append({
|
||||
"gpu_id": gpu.id,
|
||||
"static_price": gpu.price_per_hour,
|
||||
"dynamic_price": gpu.price_per_hour,
|
||||
"price_change": 0.0,
|
||||
"price_change_percent": 0.0,
|
||||
"confidence": 0.5,
|
||||
"trend": "unknown",
|
||||
"reasoning": ["Dynamic pricing unavailable"]
|
||||
})
|
||||
|
||||
# Calculate aggregate dynamic pricing metrics
|
||||
dynamic_price_values = [dp["dynamic_price"] for dp in dynamic_prices]
|
||||
avg_dynamic_price = sum(dynamic_price_values) / len(dynamic_price_values)
|
||||
|
||||
# Find best value GPU (considering price and confidence)
|
||||
best_value_gpu = min(dynamic_prices, key=lambda x: x["dynamic_price"] / x["confidence"])
|
||||
|
||||
# Get market analysis
|
||||
market_analysis = None
|
||||
try:
|
||||
# Get market data for the most common region
|
||||
regions = [gpu.region for gpu in compatible]
|
||||
most_common_region = max(set(regions), key=regions.count) if regions else "global"
|
||||
|
||||
market_data = await collector.get_aggregated_data("gpu", most_common_region)
|
||||
if market_data:
|
||||
market_analysis = {
|
||||
"demand_level": market_data.demand_level,
|
||||
"supply_level": market_data.supply_level,
|
||||
"market_volatility": market_data.price_volatility,
|
||||
"utilization_rate": market_data.utilization_rate,
|
||||
"market_sentiment": market_data.market_sentiment,
|
||||
"confidence_score": market_data.confidence_score
|
||||
}
|
||||
except Exception:
|
||||
market_analysis = None
|
||||
|
||||
return {
|
||||
"model": model,
|
||||
"min_price": min(prices),
|
||||
"max_price": max(prices),
|
||||
"average_price": sum(prices) / len(prices),
|
||||
"available_gpus": len([g for g in compatible if g.status == "available"]),
|
||||
"total_gpus": len(compatible),
|
||||
"recommended_gpu": cheapest.id,
|
||||
"static_pricing": {
|
||||
"min_price": min(static_prices),
|
||||
"max_price": max(static_prices),
|
||||
"average_price": sum(static_prices) / len(static_prices),
|
||||
"available_gpus": len([g for g in compatible if g.status == "available"]),
|
||||
"total_gpus": len(compatible),
|
||||
"recommended_gpu": cheapest.id,
|
||||
},
|
||||
"dynamic_pricing": {
|
||||
"min_price": min(dynamic_price_values),
|
||||
"max_price": max(dynamic_price_values),
|
||||
"average_price": avg_dynamic_price,
|
||||
"price_volatility": statistics.stdev(dynamic_price_values) if len(dynamic_price_values) > 1 else 0,
|
||||
"avg_confidence": sum(dp["confidence"] for dp in dynamic_prices) / len(dynamic_prices),
|
||||
"recommended_gpu": best_value_gpu["gpu_id"],
|
||||
"recommended_price": best_value_gpu["dynamic_price"],
|
||||
},
|
||||
"price_comparison": {
|
||||
"avg_price_change": avg_dynamic_price - (sum(static_prices) / len(static_prices)),
|
||||
"avg_price_change_percent": ((avg_dynamic_price - (sum(static_prices) / len(static_prices))) / (sum(static_prices) / len(static_prices))) * 100,
|
||||
"gpus_with_price_increase": len([dp for dp in dynamic_prices if dp["price_change"] > 0]),
|
||||
"gpus_with_price_decrease": len([dp for dp in dynamic_prices if dp["price_change"] < 0]),
|
||||
},
|
||||
"individual_gpu_pricing": dynamic_prices,
|
||||
"market_analysis": market_analysis,
|
||||
"pricing_timestamp": datetime.utcnow().isoformat() + "Z"
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@ from ..domain.reputation import (
|
||||
AgentReputation, CommunityFeedback, ReputationLevel,
|
||||
TrustScoreCategory
|
||||
)
|
||||
from sqlmodel import select, func, Field
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
@@ -522,3 +523,267 @@ async def update_region(
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating region for {agent_id}: {str(e)}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
|
||||
# Cross-Chain Reputation Endpoints
|
||||
@router.get("/{agent_id}/cross-chain")
|
||||
async def get_cross_chain_reputation(
|
||||
agent_id: str,
|
||||
session: SessionDep,
|
||||
reputation_service: ReputationService = Depends()
|
||||
) -> Dict[str, Any]:
|
||||
"""Get cross-chain reputation data for an agent"""
|
||||
|
||||
try:
|
||||
# Get basic reputation
|
||||
reputation = session.exec(
|
||||
select(AgentReputation).where(AgentReputation.agent_id == agent_id)
|
||||
).first()
|
||||
|
||||
if not reputation:
|
||||
raise HTTPException(status_code=404, detail="Reputation profile not found")
|
||||
|
||||
# For now, return single-chain data with cross-chain structure
|
||||
# This will be extended when full cross-chain implementation is ready
|
||||
return {
|
||||
"agent_id": agent_id,
|
||||
"cross_chain": {
|
||||
"aggregated_score": reputation.trust_score / 1000.0, # Convert to 0-1 scale
|
||||
"chain_count": 1,
|
||||
"active_chains": [1], # Default to Ethereum mainnet
|
||||
"chain_scores": {1: reputation.trust_score / 1000.0},
|
||||
"consistency_score": 1.0,
|
||||
"verification_status": "verified"
|
||||
},
|
||||
"chain_reputations": {
|
||||
1: {
|
||||
"trust_score": reputation.trust_score,
|
||||
"reputation_level": reputation.reputation_level.value,
|
||||
"transaction_count": reputation.transaction_count,
|
||||
"success_rate": reputation.success_rate,
|
||||
"last_updated": reputation.updated_at.isoformat()
|
||||
}
|
||||
},
|
||||
"last_updated": datetime.utcnow().isoformat()
|
||||
}
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting cross-chain reputation for {agent_id}: {str(e)}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
|
||||
@router.post("/{agent_id}/cross-chain/sync")
|
||||
async def sync_cross_chain_reputation(
|
||||
agent_id: str,
|
||||
background_tasks: Any, # FastAPI BackgroundTasks
|
||||
session: SessionDep,
|
||||
reputation_service: ReputationService = Depends()
|
||||
) -> Dict[str, Any]:
|
||||
"""Synchronize reputation across chains for an agent"""
|
||||
|
||||
try:
|
||||
# Get reputation
|
||||
reputation = session.exec(
|
||||
select(AgentReputation).where(AgentReputation.agent_id == agent_id)
|
||||
).first()
|
||||
|
||||
if not reputation:
|
||||
raise HTTPException(status_code=404, detail="Reputation profile not found")
|
||||
|
||||
# For now, return success (full implementation will be added)
|
||||
return {
|
||||
"agent_id": agent_id,
|
||||
"sync_status": "completed",
|
||||
"chains_synced": [1],
|
||||
"sync_timestamp": datetime.utcnow().isoformat(),
|
||||
"message": "Cross-chain reputation synchronized successfully"
|
||||
}
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error syncing cross-chain reputation for {agent_id}: {str(e)}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
|
||||
@router.get("/cross-chain/leaderboard")
|
||||
async def get_cross_chain_leaderboard(
|
||||
limit: int = Query(50, ge=1, le=100),
|
||||
min_score: float = Query(0.0, ge=0.0, le=1.0),
|
||||
session: SessionDep,
|
||||
reputation_service: ReputationService = Depends()
|
||||
) -> Dict[str, Any]:
|
||||
"""Get cross-chain reputation leaderboard"""
|
||||
|
||||
try:
|
||||
# Get top reputations
|
||||
reputations = session.exec(
|
||||
select(AgentReputation)
|
||||
.where(AgentReputation.trust_score >= min_score * 1000)
|
||||
.order_by(AgentReputation.trust_score.desc())
|
||||
.limit(limit)
|
||||
).all()
|
||||
|
||||
agents = []
|
||||
for rep in reputations:
|
||||
agents.append({
|
||||
"agent_id": rep.agent_id,
|
||||
"aggregated_score": rep.trust_score / 1000.0,
|
||||
"chain_count": 1,
|
||||
"active_chains": [1],
|
||||
"consistency_score": 1.0,
|
||||
"verification_status": "verified",
|
||||
"trust_score": rep.trust_score,
|
||||
"reputation_level": rep.reputation_level.value,
|
||||
"transaction_count": rep.transaction_count,
|
||||
"success_rate": rep.success_rate,
|
||||
"last_updated": rep.updated_at.isoformat()
|
||||
})
|
||||
|
||||
return {
|
||||
"agents": agents,
|
||||
"total_count": len(agents),
|
||||
"limit": limit,
|
||||
"min_score": min_score,
|
||||
"last_updated": datetime.utcnow().isoformat()
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting cross-chain leaderboard: {str(e)}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
|
||||
@router.post("/cross-chain/events")
|
||||
async def submit_cross_chain_event(
|
||||
event_data: Dict[str, Any],
|
||||
background_tasks: Any, # FastAPI BackgroundTasks
|
||||
session: SessionDep,
|
||||
reputation_service: ReputationService = Depends()
|
||||
) -> Dict[str, Any]:
|
||||
"""Submit a cross-chain reputation event"""
|
||||
|
||||
try:
|
||||
# Validate event data
|
||||
required_fields = ['agent_id', 'event_type', 'impact_score']
|
||||
for field in required_fields:
|
||||
if field not in event_data:
|
||||
raise HTTPException(status_code=400, detail=f"Missing required field: {field}")
|
||||
|
||||
agent_id = event_data['agent_id']
|
||||
|
||||
# Get reputation
|
||||
reputation = session.exec(
|
||||
select(AgentReputation).where(AgentReputation.agent_id == agent_id)
|
||||
).first()
|
||||
|
||||
if not reputation:
|
||||
raise HTTPException(status_code=404, detail="Reputation profile not found")
|
||||
|
||||
# Update reputation based on event
|
||||
impact = event_data['impact_score']
|
||||
old_score = reputation.trust_score
|
||||
new_score = max(0, min(1000, old_score + (impact * 1000)))
|
||||
|
||||
reputation.trust_score = new_score
|
||||
reputation.updated_at = datetime.utcnow()
|
||||
|
||||
# Update reputation level if needed
|
||||
if new_score >= 900:
|
||||
reputation.reputation_level = ReputationLevel.MASTER
|
||||
elif new_score >= 800:
|
||||
reputation.reputation_level = ReputationLevel.EXPERT
|
||||
elif new_score >= 600:
|
||||
reputation.reputation_level = ReputationLevel.ADVANCED
|
||||
elif new_score >= 400:
|
||||
reputation.reputation_level = ReputationLevel.INTERMEDIATE
|
||||
else:
|
||||
reputation.reputation_level = ReputationLevel.BEGINNER
|
||||
|
||||
session.commit()
|
||||
|
||||
return {
|
||||
"event_id": f"event_{datetime.utcnow().strftime('%Y%m%d%H%M%S')}",
|
||||
"agent_id": agent_id,
|
||||
"event_type": event_data['event_type'],
|
||||
"impact_score": impact,
|
||||
"old_score": old_score / 1000.0,
|
||||
"new_score": new_score / 1000.0,
|
||||
"processed_at": datetime.utcnow().isoformat()
|
||||
}
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error submitting cross-chain event: {str(e)}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
|
||||
@router.get("/cross-chain/analytics")
|
||||
async def get_cross_chain_analytics(
|
||||
chain_id: Optional[int] = Query(None),
|
||||
session: SessionDep,
|
||||
reputation_service: ReputationService = Depends()
|
||||
) -> Dict[str, Any]:
|
||||
"""Get cross-chain reputation analytics"""
|
||||
|
||||
try:
|
||||
# Get basic statistics
|
||||
total_agents = session.exec(select(func.count(AgentReputation.id))).first()
|
||||
avg_reputation = session.exec(select(func.avg(AgentReputation.trust_score))).first() or 0.0
|
||||
|
||||
# Get reputation distribution
|
||||
reputations = session.exec(select(AgentReputation)).all()
|
||||
|
||||
distribution = {
|
||||
"master": 0,
|
||||
"expert": 0,
|
||||
"advanced": 0,
|
||||
"intermediate": 0,
|
||||
"beginner": 0
|
||||
}
|
||||
|
||||
score_ranges = {
|
||||
"0.0-0.2": 0,
|
||||
"0.2-0.4": 0,
|
||||
"0.4-0.6": 0,
|
||||
"0.6-0.8": 0,
|
||||
"0.8-1.0": 0
|
||||
}
|
||||
|
||||
for rep in reputations:
|
||||
# Level distribution
|
||||
level = rep.reputation_level.value
|
||||
distribution[level] = distribution.get(level, 0) + 1
|
||||
|
||||
# Score distribution
|
||||
score = rep.trust_score / 1000.0
|
||||
if score < 0.2:
|
||||
score_ranges["0.0-0.2"] += 1
|
||||
elif score < 0.4:
|
||||
score_ranges["0.2-0.4"] += 1
|
||||
elif score < 0.6:
|
||||
score_ranges["0.4-0.6"] += 1
|
||||
elif score < 0.8:
|
||||
score_ranges["0.6-0.8"] += 1
|
||||
else:
|
||||
score_ranges["0.8-1.0"] += 1
|
||||
|
||||
return {
|
||||
"chain_id": chain_id or 1,
|
||||
"total_agents": total_agents,
|
||||
"average_reputation": avg_reputation / 1000.0,
|
||||
"reputation_distribution": distribution,
|
||||
"score_distribution": score_ranges,
|
||||
"cross_chain_metrics": {
|
||||
"cross_chain_agents": total_agents, # All agents for now
|
||||
"average_consistency_score": 1.0,
|
||||
"chain_diversity_score": 0.0 # No cross-chain diversity yet
|
||||
},
|
||||
"generated_at": datetime.utcnow().isoformat()
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting cross-chain analytics: {str(e)}")
|
||||
raise HTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
Reference in New Issue
Block a user