Update database paths and fix foreign key references across coordinator API

- Change SQLite database path from `/home/oib/windsurf/aitbc/data/` to `/opt/data/`
- Fix foreign key references to use correct table names (users, wallets, gpu_registry)
- Replace governance router with new governance and community routers
- Add multi-modal RL router to main application
- Simplify DEPLOYMENT_READINESS_REPORT.md to focus on production deployment status
- Update governance router with decentralized DAO voting
This commit is contained in:
oib
2026-02-26 19:32:06 +01:00
parent 1e2ea0bb9d
commit 7bb2905cca
89 changed files with 38245 additions and 1260 deletions

View File

@@ -0,0 +1,190 @@
"""
Agent Creativity API Endpoints
REST API for agent creativity enhancement, ideation, and cross-domain synthesis
"""
from datetime import datetime
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query, Body
from pydantic import BaseModel, Field
import logging
from ..storage import SessionDep
from ..services.creative_capabilities_service import (
CreativityEnhancementEngine, IdeationAlgorithm, CrossDomainCreativeIntegrator
)
from ..domain.agent_performance import CreativeCapability
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/v1/agent-creativity", tags=["agent-creativity"])
# Models
class CreativeCapabilityCreate(BaseModel):
    """Request body for registering a new creative capability on an agent."""
    agent_id: str  # identifier of the agent receiving the capability
    creative_domain: str = Field(..., description="e.g., artistic, design, innovation, scientific, narrative")
    capability_type: str = Field(..., description="e.g., generative, compositional, analytical, innovative")
    generation_models: List[str]  # names of generation models backing this capability
    initial_score: float = Field(0.5, ge=0.0, le=1.0)  # starting creativity score, constrained to [0, 1]
class CreativeCapabilityResponse(BaseModel):
    """Serialized view of a stored creative capability, returned by the API."""
    capability_id: str
    agent_id: str
    creative_domain: str
    capability_type: str
    # scoring dimensions maintained by the creativity engine
    originality_score: float
    novelty_score: float
    aesthetic_quality: float
    coherence_score: float
    style_variety: int  # count of distinct styles the capability has produced
    creative_specializations: List[str]
    status: str
class EnhanceCreativityRequest(BaseModel):
    """Parameters for a creativity-enhancement training run on a capability."""
    algorithm: str = Field("divergent_thinking", description="divergent_thinking, conceptual_blending, morphological_analysis, lateral_thinking, bisociation")
    training_cycles: int = Field(100, ge=1, le=1000)  # number of enhancement iterations
class EvaluateCreationRequest(BaseModel):
    """Payload for scoring a creative output against a capability."""
    creation_data: Dict[str, Any]  # the creative artifact to evaluate (free-form)
    expert_feedback: Optional[Dict[str, float]] = None  # optional human scores keyed by criterion
class IdeationRequest(BaseModel):
    """Parameters for an idea-generation run."""
    problem_statement: str
    domain: str
    technique: str = Field("scamper", description="scamper, triz, six_thinking_hats, first_principles, biomimicry")
    num_ideas: int = Field(5, ge=1, le=20)  # bounded to keep responses small
    constraints: Optional[Dict[str, Any]] = None  # optional constraints applied during generation
class SynthesisRequest(BaseModel):
    """Parameters for cross-domain concept synthesis."""
    agent_id: str
    primary_domain: str  # anchor domain for the synthesis
    secondary_domains: List[str]  # domains blended into the primary one
    synthesis_goal: str
# Endpoints
@router.post("/capabilities", response_model=CreativeCapabilityResponse)
async def create_creative_capability(
    request: CreativeCapabilityCreate,
    session: SessionDep
):
    """Initialize a new creative capability for an agent."""
    creativity_engine = CreativityEnhancementEngine()
    try:
        # Delegate persistence and initialization to the service layer.
        return await creativity_engine.create_creative_capability(
            session=session,
            agent_id=request.agent_id,
            creative_domain=request.creative_domain,
            capability_type=request.capability_type,
            generation_models=request.generation_models,
            initial_score=request.initial_score,
        )
    except Exception as exc:
        logger.error(f"Error creating creative capability: {exc}")
        raise HTTPException(status_code=500, detail=str(exc))
@router.post("/capabilities/{capability_id}/enhance")
async def enhance_creativity(
    capability_id: str,
    request: EnhanceCreativityRequest,
    session: SessionDep
):
    """Enhance a specific creative capability using specified algorithm."""
    creativity_engine = CreativityEnhancementEngine()
    try:
        # ValueError from the engine means the capability does not exist.
        return await creativity_engine.enhance_creativity(
            session=session,
            capability_id=capability_id,
            algorithm=request.algorithm,
            training_cycles=request.training_cycles,
        )
    except ValueError as not_found:
        raise HTTPException(status_code=404, detail=str(not_found))
    except Exception as exc:
        logger.error(f"Error enhancing creativity: {exc}")
        raise HTTPException(status_code=500, detail=str(exc))
@router.post("/capabilities/{capability_id}/evaluate")
async def evaluate_creation(
    capability_id: str,
    request: EvaluateCreationRequest,
    session: SessionDep
):
    """Evaluate a creative output and update agent capability metrics."""
    creativity_engine = CreativityEnhancementEngine()
    try:
        # Engine scores the artifact and persists updated capability metrics.
        return await creativity_engine.evaluate_creation(
            session=session,
            capability_id=capability_id,
            creation_data=request.creation_data,
            expert_feedback=request.expert_feedback,
        )
    except ValueError as not_found:
        raise HTTPException(status_code=404, detail=str(not_found))
    except Exception as exc:
        logger.error(f"Error evaluating creation: {exc}")
        raise HTTPException(status_code=500, detail=str(exc))
@router.post("/ideation/generate")
async def generate_ideas(request: IdeationRequest):
    """Generate innovative ideas using specialized ideation algorithms."""
    # Stateless: no DB session needed for pure ideation.
    algorithm_runner = IdeationAlgorithm()
    try:
        return await algorithm_runner.generate_ideas(
            problem_statement=request.problem_statement,
            domain=request.domain,
            technique=request.technique,
            num_ideas=request.num_ideas,
            constraints=request.constraints,
        )
    except Exception as exc:
        logger.error(f"Error generating ideas: {exc}")
        raise HTTPException(status_code=500, detail=str(exc))
@router.post("/synthesis/cross-domain")
async def synthesize_cross_domain(
    request: SynthesisRequest,
    session: SessionDep
):
    """Synthesize concepts from multiple domains to create novel outputs."""
    cross_domain = CrossDomainCreativeIntegrator()
    try:
        # ValueError here is treated as a client error (bad domain combination).
        return await cross_domain.generate_cross_domain_synthesis(
            session=session,
            agent_id=request.agent_id,
            primary_domain=request.primary_domain,
            secondary_domains=request.secondary_domains,
            synthesis_goal=request.synthesis_goal,
        )
    except ValueError as bad_request:
        raise HTTPException(status_code=400, detail=str(bad_request))
    except Exception as exc:
        logger.error(f"Error in cross-domain synthesis: {exc}")
        raise HTTPException(status_code=500, detail=str(exc))
@router.get("/capabilities/{agent_id}")
async def list_agent_creative_capabilities(
    agent_id: str,
    session: SessionDep
):
    """List all creative capabilities for a specific agent"""
    # NOTE(review): `select` is never imported in this module, so this endpoint
    # will hit a NameError at request time (surfacing as a 500 via the broad
    # except below). Add `from sqlmodel import select` (or the project's
    # equivalent) to the top-of-file imports — confirm which ORM the project uses.
    try:
        capabilities = session.exec(
            select(CreativeCapability).where(CreativeCapability.agent_id == agent_id)
        ).all()
        return capabilities
    except Exception as e:
        logger.error(f"Error fetching creative capabilities: {e}")
        raise HTTPException(status_code=500, detail=str(e))

View File

@@ -0,0 +1,721 @@
"""
Advanced Agent Performance API Endpoints
REST API for meta-learning, resource optimization, and performance enhancement
"""
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query
from pydantic import BaseModel, Field
import logging
from ..storage import SessionDep
from ..services.agent_performance_service import (
AgentPerformanceService, MetaLearningEngine, ResourceManager, PerformanceOptimizer
)
from ..domain.agent_performance import (
AgentPerformanceProfile, MetaLearningModel, ResourceAllocation,
PerformanceOptimization, AgentCapability, FusionModel,
ReinforcementLearningConfig, CreativeCapability,
LearningStrategy, PerformanceMetric, ResourceType,
OptimizationTarget
)
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/v1/agent-performance", tags=["agent-performance"])
# Pydantic models for API requests/responses
class PerformanceProfileRequest(BaseModel):
    """Request model for performance profile creation."""
    agent_id: str  # unique identifier of the agent being profiled
    agent_type: str = Field(default="openclaw")
    initial_metrics: Dict[str, float] = Field(default_factory=dict)  # seed metric values; may be empty
class PerformanceProfileResponse(BaseModel):
    """Response model for a persisted performance profile (timestamps as ISO-8601 strings)."""
    profile_id: str
    agent_id: str
    agent_type: str
    overall_score: float
    performance_metrics: Dict[str, float]  # metric name -> current value
    learning_strategies: List[str]
    specialization_areas: List[str]
    expertise_levels: Dict[str, float]  # area -> expertise level
    resource_efficiency: Dict[str, float]
    cost_per_task: float
    throughput: float
    average_latency: float
    last_assessed: Optional[str]  # None if the profile has never been assessed
    created_at: str
    updated_at: str
class MetaLearningRequest(BaseModel):
    """Request model for meta-learning model creation."""
    model_name: str
    base_algorithms: List[str]  # underlying learners the meta-model composes
    meta_strategy: LearningStrategy  # enum from ..domain.agent_performance
    adaptation_targets: List[str]
class MetaLearningResponse(BaseModel):
    """Response model for a meta-learning model (enum values and timestamps serialized as strings)."""
    model_id: str
    model_name: str
    model_type: str
    meta_strategy: str  # LearningStrategy.value
    adaptation_targets: List[str]
    meta_accuracy: float
    adaptation_speed: float
    generalization_ability: float
    status: str
    created_at: str
    trained_at: Optional[str]  # None until the model has been trained
class ResourceAllocationRequest(BaseModel):
    """Request model for resource allocation."""
    agent_id: str
    task_requirements: Dict[str, Any]  # free-form requirements interpreted by ResourceManager
    optimization_target: OptimizationTarget = Field(default=OptimizationTarget.EFFICIENCY)
    priority_level: str = Field(default="normal")
class ResourceAllocationResponse(BaseModel):
    """Response model for a granted resource allocation."""
    allocation_id: str
    agent_id: str
    # allocated quantities (floats to allow fractional shares)
    cpu_cores: float
    memory_gb: float
    gpu_count: float
    gpu_memory_gb: float
    storage_gb: float
    network_bandwidth: float
    optimization_target: str  # OptimizationTarget.value
    status: str
    allocated_at: str  # ISO-8601 timestamp
class PerformanceOptimizationRequest(BaseModel):
    """Request model for performance optimization."""
    agent_id: str
    target_metric: PerformanceMetric  # enum from ..domain.agent_performance
    current_performance: Dict[str, float]  # baseline metric values before optimization
    optimization_type: str = Field(default="comprehensive")
class PerformanceOptimizationResponse(BaseModel):
    """Response model for a performance optimization run."""
    optimization_id: str
    agent_id: str
    optimization_type: str
    target_metric: str  # PerformanceMetric.value
    status: str
    performance_improvement: float
    resource_savings: float
    cost_savings: float
    overall_efficiency_gain: float
    created_at: str
    completed_at: Optional[str]  # None while the optimization is still running
class CapabilityRequest(BaseModel):
    """Request model for registering an agent capability."""
    agent_id: str
    capability_name: str
    capability_type: str
    domain_area: str
    skill_level: float = Field(ge=0, le=10.0)  # 0-10 scale; proficiency is derived as skill_level / 10
    specialization_areas: List[str] = Field(default_factory=list)
class CapabilityResponse(BaseModel):
    """Response model for a stored agent capability."""
    capability_id: str
    agent_id: str
    capability_name: str
    capability_type: str
    domain_area: str
    skill_level: float  # 0-10 scale
    proficiency_score: float  # normalized to [0, 1]
    specialization_areas: List[str]
    status: str
    created_at: str  # ISO-8601 timestamp
# API Endpoints
@router.post("/profiles", response_model=PerformanceProfileResponse)
async def create_performance_profile(
    profile_request: PerformanceProfileRequest,
    session: SessionDep
) -> PerformanceProfileResponse:
    """Create agent performance profile."""
    service = AgentPerformanceService(session)
    try:
        profile = await service.create_performance_profile(
            agent_id=profile_request.agent_id,
            agent_type=profile_request.agent_type,
            initial_metrics=profile_request.initial_metrics,
        )
        # Serialize datetimes to ISO-8601 strings for the response model.
        return PerformanceProfileResponse(
            profile_id=profile.profile_id,
            agent_id=profile.agent_id,
            agent_type=profile.agent_type,
            overall_score=profile.overall_score,
            performance_metrics=profile.performance_metrics,
            learning_strategies=profile.learning_strategies,
            specialization_areas=profile.specialization_areas,
            expertise_levels=profile.expertise_levels,
            resource_efficiency=profile.resource_efficiency,
            cost_per_task=profile.cost_per_task,
            throughput=profile.throughput,
            average_latency=profile.average_latency,
            last_assessed=profile.last_assessed.isoformat() if profile.last_assessed else None,
            created_at=profile.created_at.isoformat(),
            updated_at=profile.updated_at.isoformat(),
        )
    except Exception as exc:
        logger.error(f"Error creating performance profile: {str(exc)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/profiles/{agent_id}", response_model=Dict[str, Any])
async def get_performance_profile(
    agent_id: str,
    session: SessionDep
) -> Dict[str, Any]:
    """Get agent performance profile."""
    service = AgentPerformanceService(session)
    try:
        profile = await service.get_comprehensive_profile(agent_id)
        # Service signals "not found" via an 'error' key rather than raising.
        if 'error' in profile:
            raise HTTPException(status_code=404, detail=profile['error'])
        return profile
    except HTTPException:
        # Re-raise our own 404 untouched.
        raise
    except Exception as exc:
        logger.error(f"Error getting performance profile for agent {agent_id}: {str(exc)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/profiles/{agent_id}/metrics")
async def update_performance_metrics(
    agent_id: str,
    metrics: Dict[str, float],
    session: SessionDep,
    task_context: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
    """Update agent performance metrics.

    Fix: `session` (which has no default) was declared after the defaulted
    `task_context` parameter — a non-default parameter following a default one
    is a SyntaxError in Python, so this module could not even be imported.
    FastAPI binds all of these by keyword, so reordering does not affect callers.
    """
    performance_service = AgentPerformanceService(session)
    try:
        profile = await performance_service.update_performance_metrics(
            agent_id=agent_id,
            new_metrics=metrics,
            task_context=task_context
        )
        return {
            "success": True,
            "profile_id": profile.profile_id,
            "overall_score": profile.overall_score,
            "updated_at": profile.updated_at.isoformat(),
            "improvement_trends": profile.improvement_trends
        }
    except Exception as e:
        logger.error(f"Error updating performance metrics for agent {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/meta-learning/models", response_model=MetaLearningResponse)
async def create_meta_learning_model(
    model_request: MetaLearningRequest,
    session: SessionDep
) -> MetaLearningResponse:
    """Create meta-learning model."""
    engine = MetaLearningEngine()
    try:
        created = await engine.create_meta_learning_model(
            session=session,
            model_name=model_request.model_name,
            base_algorithms=model_request.base_algorithms,
            meta_strategy=model_request.meta_strategy,
            adaptation_targets=model_request.adaptation_targets,
        )
        # Enums and datetimes are flattened to plain strings for the response.
        return MetaLearningResponse(
            model_id=created.model_id,
            model_name=created.model_name,
            model_type=created.model_type,
            meta_strategy=created.meta_strategy.value,
            adaptation_targets=created.adaptation_targets,
            meta_accuracy=created.meta_accuracy,
            adaptation_speed=created.adaptation_speed,
            generalization_ability=created.generalization_ability,
            status=created.status,
            created_at=created.created_at.isoformat(),
            trained_at=created.trained_at.isoformat() if created.trained_at else None,
        )
    except Exception as exc:
        logger.error(f"Error creating meta-learning model: {str(exc)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/meta-learning/models/{model_id}/adapt")
async def adapt_model_to_task(
    model_id: str,
    task_data: Dict[str, Any],
    session: SessionDep,
    adaptation_steps: int = Query(default=10, ge=1, le=50),
) -> Dict[str, Any]:
    """Adapt meta-learning model to new task.

    Fix: `session` (no default) was declared after the defaulted
    `adaptation_steps` Query parameter — a SyntaxError in Python. FastAPI
    resolves these by keyword, so the reorder is transparent to clients.
    """
    meta_learning_engine = MetaLearningEngine()
    try:
        results = await meta_learning_engine.adapt_to_new_task(
            session=session,
            model_id=model_id,
            task_data=task_data,
            adaptation_steps=adaptation_steps
        )
        return {
            "success": True,
            "model_id": model_id,
            "adaptation_results": results,
            "adapted_at": datetime.utcnow().isoformat()
        }
    except ValueError as e:
        # Engine raises ValueError when the model does not exist.
        raise HTTPException(status_code=404, detail=str(e))
    except Exception as e:
        logger.error(f"Error adapting model {model_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/meta-learning/models")
async def list_meta_learning_models(
    session: SessionDep,
    status: Optional[str] = Query(default=None, description="Filter by status"),
    meta_strategy: Optional[str] = Query(default=None, description="Filter by meta strategy"),
    limit: int = Query(default=50, ge=1, le=100, description="Number of results"),
) -> List[Dict[str, Any]]:
    """List meta-learning models, newest first, with optional filters.

    Fix: `session` (no default) was declared after defaulted Query parameters,
    which is a SyntaxError in Python; it now comes first. FastAPI binds by
    keyword, so clients are unaffected.
    """
    # NOTE(review): `select` is not imported in this module; add
    # `from sqlmodel import select` at the top of the file.
    try:
        query = select(MetaLearningModel)
        if status:
            query = query.where(MetaLearningModel.status == status)
        if meta_strategy:
            # Invalid strategy strings raise ValueError -> caught below as 500.
            query = query.where(MetaLearningModel.meta_strategy == LearningStrategy(meta_strategy))
        models = session.exec(
            query.order_by(MetaLearningModel.created_at.desc()).limit(limit)
        ).all()
        return [
            {
                "model_id": model.model_id,
                "model_name": model.model_name,
                "model_type": model.model_type,
                "meta_strategy": model.meta_strategy.value,
                "adaptation_targets": model.adaptation_targets,
                "meta_accuracy": model.meta_accuracy,
                "adaptation_speed": model.adaptation_speed,
                "generalization_ability": model.generalization_ability,
                "status": model.status,
                "deployment_count": model.deployment_count,
                "success_rate": model.success_rate,
                "created_at": model.created_at.isoformat(),
                "trained_at": model.trained_at.isoformat() if model.trained_at else None
            }
            for model in models
        ]
    except Exception as e:
        logger.error(f"Error listing meta-learning models: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/resources/allocate", response_model=ResourceAllocationResponse)
async def allocate_resources(
    allocation_request: ResourceAllocationRequest,
    session: SessionDep
) -> ResourceAllocationResponse:
    """Allocate resources for agent task."""
    manager = ResourceManager()
    try:
        granted = await manager.allocate_resources(
            session=session,
            agent_id=allocation_request.agent_id,
            task_requirements=allocation_request.task_requirements,
            optimization_target=allocation_request.optimization_target,
        )
        # Flatten the ORM object into the API response shape.
        return ResourceAllocationResponse(
            allocation_id=granted.allocation_id,
            agent_id=granted.agent_id,
            cpu_cores=granted.cpu_cores,
            memory_gb=granted.memory_gb,
            gpu_count=granted.gpu_count,
            gpu_memory_gb=granted.gpu_memory_gb,
            storage_gb=granted.storage_gb,
            network_bandwidth=granted.network_bandwidth,
            optimization_target=granted.optimization_target.value,
            status=granted.status,
            allocated_at=granted.allocated_at.isoformat(),
        )
    except Exception as exc:
        logger.error(f"Error allocating resources: {str(exc)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/resources/{agent_id}")
async def get_resource_allocations(
    agent_id: str,
    session: SessionDep,
    status: Optional[str] = Query(default=None, description="Filter by status"),
    limit: int = Query(default=20, ge=1, le=100, description="Number of results"),
) -> List[Dict[str, Any]]:
    """Get resource allocations for an agent, newest first.

    Fix: `session` (no default) was declared after defaulted Query parameters —
    a SyntaxError in Python; it now comes first. FastAPI binds by keyword,
    so clients are unaffected.
    """
    # NOTE(review): `select` is not imported in this module; add
    # `from sqlmodel import select` at the top of the file.
    try:
        query = select(ResourceAllocation).where(ResourceAllocation.agent_id == agent_id)
        if status:
            query = query.where(ResourceAllocation.status == status)
        allocations = session.exec(
            query.order_by(ResourceAllocation.created_at.desc()).limit(limit)
        ).all()
        return [
            {
                "allocation_id": allocation.allocation_id,
                "agent_id": allocation.agent_id,
                "task_id": allocation.task_id,
                "cpu_cores": allocation.cpu_cores,
                "memory_gb": allocation.memory_gb,
                "gpu_count": allocation.gpu_count,
                "gpu_memory_gb": allocation.gpu_memory_gb,
                "storage_gb": allocation.storage_gb,
                "network_bandwidth": allocation.network_bandwidth,
                "optimization_target": allocation.optimization_target.value,
                "priority_level": allocation.priority_level,
                "status": allocation.status,
                "efficiency_score": allocation.efficiency_score,
                "cost_efficiency": allocation.cost_efficiency,
                "allocated_at": allocation.allocated_at.isoformat() if allocation.allocated_at else None,
                "started_at": allocation.started_at.isoformat() if allocation.started_at else None,
                "completed_at": allocation.completed_at.isoformat() if allocation.completed_at else None
            }
            for allocation in allocations
        ]
    except Exception as e:
        logger.error(f"Error getting resource allocations for agent {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/optimization/optimize", response_model=PerformanceOptimizationResponse)
async def optimize_performance(
    optimization_request: PerformanceOptimizationRequest,
    session: SessionDep
) -> PerformanceOptimizationResponse:
    """Optimize agent performance."""
    optimizer = PerformanceOptimizer()
    try:
        run = await optimizer.optimize_agent_performance(
            session=session,
            agent_id=optimization_request.agent_id,
            target_metric=optimization_request.target_metric,
            current_performance=optimization_request.current_performance,
        )
        # Enum/datetime fields are serialized to plain strings for the client.
        return PerformanceOptimizationResponse(
            optimization_id=run.optimization_id,
            agent_id=run.agent_id,
            optimization_type=run.optimization_type,
            target_metric=run.target_metric.value,
            status=run.status,
            performance_improvement=run.performance_improvement,
            resource_savings=run.resource_savings,
            cost_savings=run.cost_savings,
            overall_efficiency_gain=run.overall_efficiency_gain,
            created_at=run.created_at.isoformat(),
            completed_at=run.completed_at.isoformat() if run.completed_at else None,
        )
    except Exception as exc:
        logger.error(f"Error optimizing performance: {str(exc)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/optimization/{agent_id}")
async def get_optimization_history(
    agent_id: str,
    session: SessionDep,
    status: Optional[str] = Query(default=None, description="Filter by status"),
    target_metric: Optional[str] = Query(default=None, description="Filter by target metric"),
    limit: int = Query(default=20, ge=1, le=100, description="Number of results"),
) -> List[Dict[str, Any]]:
    """Get optimization history for an agent, newest first.

    Fix: `session` (no default) was declared after defaulted Query parameters —
    a SyntaxError in Python; it now comes first. FastAPI binds by keyword,
    so clients are unaffected.
    """
    # NOTE(review): `select` is not imported in this module; add
    # `from sqlmodel import select` at the top of the file.
    try:
        query = select(PerformanceOptimization).where(PerformanceOptimization.agent_id == agent_id)
        if status:
            query = query.where(PerformanceOptimization.status == status)
        if target_metric:
            # Invalid metric strings raise ValueError -> caught below as 500.
            query = query.where(PerformanceOptimization.target_metric == PerformanceMetric(target_metric))
        optimizations = session.exec(
            query.order_by(PerformanceOptimization.created_at.desc()).limit(limit)
        ).all()
        return [
            {
                "optimization_id": optimization.optimization_id,
                "agent_id": optimization.agent_id,
                "optimization_type": optimization.optimization_type,
                "target_metric": optimization.target_metric.value,
                "status": optimization.status,
                "baseline_performance": optimization.baseline_performance,
                "optimized_performance": optimization.optimized_performance,
                "baseline_cost": optimization.baseline_cost,
                "optimized_cost": optimization.optimized_cost,
                "performance_improvement": optimization.performance_improvement,
                "resource_savings": optimization.resource_savings,
                "cost_savings": optimization.cost_savings,
                "overall_efficiency_gain": optimization.overall_efficiency_gain,
                "optimization_duration": optimization.optimization_duration,
                "iterations_required": optimization.iterations_required,
                "convergence_achieved": optimization.convergence_achieved,
                "created_at": optimization.created_at.isoformat(),
                "completed_at": optimization.completed_at.isoformat() if optimization.completed_at else None
            }
            for optimization in optimizations
        ]
    except Exception as e:
        logger.error(f"Error getting optimization history for agent {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/capabilities", response_model=CapabilityResponse)
async def create_capability(
    capability_request: CapabilityRequest,
    session: SessionDep
) -> CapabilityResponse:
    """Create agent capability.

    Fix: `uuid4` was used without being imported anywhere in this module,
    which would raise NameError on every request (surfacing as a 500). It is
    imported locally here; alternatively add `from uuid import uuid4` to the
    top-of-file imports.
    """
    from uuid import uuid4  # stdlib; missing from module-level imports
    try:
        capability_id = f"cap_{uuid4().hex[:8]}"
        capability = AgentCapability(
            capability_id=capability_id,
            agent_id=capability_request.agent_id,
            capability_name=capability_request.capability_name,
            capability_type=capability_request.capability_type,
            domain_area=capability_request.domain_area,
            skill_level=capability_request.skill_level,
            specialization_areas=capability_request.specialization_areas,
            # proficiency is the 0-10 skill level normalized into [0, 1]
            proficiency_score=min(1.0, capability_request.skill_level / 10.0),
            created_at=datetime.utcnow()
        )
        session.add(capability)
        session.commit()
        session.refresh(capability)
        return CapabilityResponse(
            capability_id=capability.capability_id,
            agent_id=capability.agent_id,
            capability_name=capability.capability_name,
            capability_type=capability.capability_type,
            domain_area=capability.domain_area,
            skill_level=capability.skill_level,
            proficiency_score=capability.proficiency_score,
            specialization_areas=capability.specialization_areas,
            status=capability.status,
            created_at=capability.created_at.isoformat()
        )
    except Exception as e:
        logger.error(f"Error creating capability: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/capabilities/{agent_id}")
async def get_agent_capabilities(
    agent_id: str,
    session: SessionDep,
    capability_type: Optional[str] = Query(default=None, description="Filter by capability type"),
    domain_area: Optional[str] = Query(default=None, description="Filter by domain area"),
    limit: int = Query(default=50, ge=1, le=100, description="Number of results"),
) -> List[Dict[str, Any]]:
    """Get agent capabilities ordered by skill level (descending).

    Fix: `session` (no default) was declared after defaulted Query parameters —
    a SyntaxError in Python; it now comes first. FastAPI binds by keyword,
    so clients are unaffected.
    """
    # NOTE(review): `select` is not imported in this module; add
    # `from sqlmodel import select` at the top of the file.
    try:
        query = select(AgentCapability).where(AgentCapability.agent_id == agent_id)
        if capability_type:
            query = query.where(AgentCapability.capability_type == capability_type)
        if domain_area:
            query = query.where(AgentCapability.domain_area == domain_area)
        capabilities = session.exec(
            query.order_by(AgentCapability.skill_level.desc()).limit(limit)
        ).all()
        return [
            {
                "capability_id": capability.capability_id,
                "agent_id": capability.agent_id,
                "capability_name": capability.capability_name,
                "capability_type": capability.capability_type,
                "domain_area": capability.domain_area,
                "skill_level": capability.skill_level,
                "proficiency_score": capability.proficiency_score,
                "experience_years": capability.experience_years,
                "success_rate": capability.success_rate,
                "average_quality": capability.average_quality,
                "learning_rate": capability.learning_rate,
                "adaptation_speed": capability.adaptation_speed,
                "specialization_areas": capability.specialization_areas,
                "sub_capabilities": capability.sub_capabilities,
                "tool_proficiency": capability.tool_proficiency,
                "certified": capability.certified,
                "certification_level": capability.certification_level,
                "status": capability.status,
                "acquired_at": capability.acquired_at.isoformat(),
                "last_improved": capability.last_improved.isoformat() if capability.last_improved else None
            }
            for capability in capabilities
        ]
    except Exception as e:
        logger.error(f"Error getting capabilities for agent {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/analytics/performance-summary")
async def get_performance_summary(
    session: SessionDep,
    agent_ids: List[str] = Query(default=[], description="List of agent IDs"),
    metric: Optional[str] = Query(default="overall_score", description="Metric to summarize"),
    period: str = Query(default="7d", description="Time period"),
) -> Dict[str, Any]:
    """Get performance summary for agents.

    Fixes:
    - `session` (no default) was declared after defaulted Query parameters —
      a SyntaxError in Python; it now comes first (FastAPI binds by keyword).
    - The original called `self.calculate_specialization_distribution(...)`
      inside a module-level function where `self` is undefined (NameError at
      request time); it now calls the module-level helper directly.
    """
    # NOTE(review): `select` is not imported in this module; add
    # `from sqlmodel import select` at the top of the file.
    # NOTE(review): `metric` and `period` are accepted but only `period` is
    # echoed back — the summary is always based on overall_score.
    try:
        if not agent_ids:
            # No explicit filter: summarize every profiled agent.
            profiles = session.exec(select(AgentPerformanceProfile)).all()
            agent_ids = [p.agent_id for p in profiles]
        summaries = []
        for agent_id in agent_ids:
            profile = session.exec(
                select(AgentPerformanceProfile).where(AgentPerformanceProfile.agent_id == agent_id)
            ).first()
            if profile:
                summaries.append({
                    "agent_id": agent_id,
                    "overall_score": profile.overall_score,
                    "performance_metrics": profile.performance_metrics,
                    "resource_efficiency": profile.resource_efficiency,
                    "cost_per_task": profile.cost_per_task,
                    "throughput": profile.throughput,
                    "average_latency": profile.average_latency,
                    "specialization_areas": profile.specialization_areas,
                    "last_assessed": profile.last_assessed.isoformat() if profile.last_assessed else None
                })
        if summaries:
            overall_scores = [s["overall_score"] for s in summaries]
            avg_score = sum(overall_scores) / len(overall_scores)
            return {
                "period": period,
                "agent_count": len(summaries),
                "average_score": avg_score,
                "top_performers": sorted(summaries, key=lambda x: x["overall_score"], reverse=True)[:10],
                "performance_distribution": {
                    "excellent": len([s for s in summaries if s["overall_score"] >= 80]),
                    "good": len([s for s in summaries if 60 <= s["overall_score"] < 80]),
                    "average": len([s for s in summaries if 40 <= s["overall_score"] < 60]),
                    "below_average": len([s for s in summaries if s["overall_score"] < 40])
                },
                "specialization_distribution": calculate_specialization_distribution(summaries)
            }
        else:
            # Empty result set: return a well-formed, zeroed summary.
            return {
                "period": period,
                "agent_count": 0,
                "average_score": 0.0,
                "top_performers": [],
                "performance_distribution": {},
                "specialization_distribution": {}
            }
    except Exception as e:
        logger.error(f"Error getting performance summary: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
def calculate_specialization_distribution(summaries: List[Dict[str, Any]]) -> Dict[str, int]:
    """Count how many agent summaries claim each specialization area."""
    counts: Dict[str, int] = {}
    for entry in summaries:
        for specialization in entry["specialization_areas"]:
            counts[specialization] = counts.get(specialization, 0) + 1
    return counts
@router.get("/health")
async def health_check() -> Dict[str, Any]:
    """Report liveness of the agent-performance service and its sub-components."""
    component_names = (
        "meta_learning_engine",
        "resource_manager",
        "performance_optimizer",
        "performance_service",
    )
    return {
        "status": "healthy",
        "timestamp": datetime.utcnow().isoformat(),
        "version": "1.0.0",
        # All components are reported statically; no live probing is done here.
        "services": {name: "operational" for name in component_names},
    }

View File

@@ -0,0 +1,804 @@
"""
Marketplace Analytics API Endpoints
REST API for analytics, insights, reporting, and dashboards
"""
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query
from pydantic import BaseModel, Field
import logging
from ..storage import SessionDep
from ..services.analytics_service import MarketplaceAnalytics
from ..domain.analytics import (
MarketMetric, MarketInsight, AnalyticsReport, DashboardConfig,
AnalyticsPeriod, MetricType, InsightType, ReportType
)
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/v1/analytics", tags=["analytics"])
# Pydantic models for API requests/responses
class MetricResponse(BaseModel):
    """Response model for a single market metric observation."""
    metric_name: str
    metric_type: str
    period_type: str
    value: float
    previous_value: Optional[float]  # None when no prior period exists
    change_percentage: Optional[float]  # None when no comparison is possible
    unit: str
    category: str
    # ISO-8601 timestamps delimiting the observation window
    recorded_at: str
    period_start: str
    period_end: str
    breakdown: Dict[str, Any]  # per-dimension decomposition of the value
    comparisons: Dict[str, Any]
class InsightResponse(BaseModel):
    """Response model for a generated market insight."""
    id: str
    insight_type: str
    title: str
    description: str
    confidence_score: float
    impact_level: str
    related_metrics: List[str]  # metric names this insight was derived from
    time_horizon: str
    recommendations: List[str]
    suggested_actions: List[Dict[str, Any]]
    created_at: str
    expires_at: Optional[str]  # None for insights without an expiry
    insight_data: Dict[str, Any]  # raw supporting data for the insight
class DashboardResponse(BaseModel):
    """Response model for dashboard configuration."""
    dashboard_id: str
    name: str
    description: str
    dashboard_type: str
    layout: Dict[str, Any]  # grid/placement configuration
    widgets: List[Dict[str, Any]]
    filters: List[Dict[str, Any]]
    refresh_interval: int  # seconds between auto-refreshes when enabled
    auto_refresh: bool
    owner_id: str
    status: str
    created_at: str
    updated_at: str
class ReportRequest(BaseModel):
    """Request model for generating an analytics report."""
    report_type: ReportType  # enum from ..domain.analytics
    period_type: AnalyticsPeriod
    start_date: str  # ISO-8601 date string
    end_date: str  # ISO-8601 date string
    filters: Dict[str, Any] = Field(default_factory=dict)
    include_charts: bool = Field(default=True)
    format: str = Field(default="json")  # output format of the report
class MarketOverviewResponse(BaseModel):
    """Response model for a point-in-time market overview."""
    timestamp: str  # ISO-8601 time the overview was generated
    period: str
    metrics: Dict[str, Any]
    insights: List[Dict[str, Any]]
    alerts: List[Dict[str, Any]]
    summary: Dict[str, Any]
class AnalyticsSummaryResponse(BaseModel):
    """Response model summarizing one data-collection run."""
    period_type: str
    start_time: str  # ISO-8601 window start
    end_time: str  # ISO-8601 window end
    metrics_collected: int
    insights_generated: int
    market_data: Dict[str, Any]
# API Endpoints
@router.post("/data-collection", response_model=AnalyticsSummaryResponse)
async def collect_market_data(
    session: SessionDep,
    period_type: AnalyticsPeriod = Query(default=AnalyticsPeriod.DAILY, description="Collection period"),
) -> AnalyticsSummaryResponse:
    """Collect market data for analytics.

    Fix: `session` (no default) was declared after the defaulted `period_type`
    Query parameter — a non-default parameter after a default one is a
    SyntaxError in Python. FastAPI binds by keyword, so clients are unaffected.
    """
    analytics_service = MarketplaceAnalytics(session)
    try:
        result = await analytics_service.collect_market_data(period_type)
        # Service returns a dict matching the response model's fields.
        return AnalyticsSummaryResponse(**result)
    except Exception as e:
        logger.error(f"Error collecting market data: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/insights", response_model=Dict[str, Any])
async def get_market_insights(
    session: SessionDep,
    time_period: str = Query(default="daily", description="Time period: daily, weekly, monthly"),
    insight_type: Optional[str] = Query(default=None, description="Filter by insight type"),
    impact_level: Optional[str] = Query(default=None, description="Filter by impact level"),
    limit: int = Query(default=20, ge=1, le=100, description="Number of results"),
) -> Dict[str, Any]:
    """Get market insights and analysis, optionally filtered by type/impact.

    Fix: `session` (no default) was declared after defaulted Query parameters —
    a SyntaxError in Python; it now comes first. FastAPI binds by keyword,
    so clients are unaffected.
    """
    analytics_service = MarketplaceAnalytics(session)
    try:
        result = await analytics_service.generate_insights(time_period)
        # Post-filter the generated insight groups when filters were supplied.
        if insight_type or impact_level:
            filtered_insights = {}
            for type_name, insights in result["insight_groups"].items():
                filtered = insights
                if insight_type:
                    filtered = [i for i in filtered if i["type"] == insight_type]
                if impact_level:
                    filtered = [i for i in filtered if i["impact"] == impact_level]
                if filtered:
                    filtered_insights[type_name] = filtered[:limit]
            result["insight_groups"] = filtered_insights
            result["total_insights"] = sum(len(insights) for insights in filtered_insights.values())
        return result
    except Exception as e:
        logger.error(f"Error getting market insights: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/metrics", response_model=List[MetricResponse])
async def get_market_metrics(
    session: SessionDep,
    period_type: AnalyticsPeriod = Query(default=AnalyticsPeriod.DAILY, description="Period type"),
    metric_name: Optional[str] = Query(default=None, description="Filter by metric name"),
    category: Optional[str] = Query(default=None, description="Filter by category"),
    geographic_region: Optional[str] = Query(default=None, description="Filter by region"),
    limit: int = Query(default=50, ge=1, le=100, description="Number of results"),
) -> List[MetricResponse]:
    """Get market metrics with filters.

    Fix: ``session`` (no default) moved before the defaulted query
    parameters — the original ordering is a Python SyntaxError.
    Results are ordered by ``recorded_at`` descending (newest first).
    """
    try:
        query = select(MarketMetric).where(MarketMetric.period_type == period_type)
        if metric_name:
            query = query.where(MarketMetric.metric_name == metric_name)
        if category:
            query = query.where(MarketMetric.category == category)
        if geographic_region:
            query = query.where(MarketMetric.geographic_region == geographic_region)
        metrics = session.exec(
            query.order_by(MarketMetric.recorded_at.desc()).limit(limit)
        ).all()
        return [
            MetricResponse(
                metric_name=metric.metric_name,
                metric_type=metric.metric_type.value,
                period_type=metric.period_type.value,
                value=metric.value,
                previous_value=metric.previous_value,
                change_percentage=metric.change_percentage,
                unit=metric.unit,
                category=metric.category,
                recorded_at=metric.recorded_at.isoformat(),
                period_start=metric.period_start.isoformat(),
                period_end=metric.period_end.isoformat(),
                breakdown=metric.breakdown,
                comparisons=metric.comparisons
            )
            for metric in metrics
        ]
    except Exception as e:
        logger.error(f"Error getting market metrics: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/overview", response_model=MarketOverviewResponse)
async def get_market_overview(
    session: SessionDep
) -> MarketOverviewResponse:
    """Return the comprehensive market overview snapshot."""
    service = MarketplaceAnalytics(session)
    try:
        snapshot = await service.get_market_overview()
        response = MarketOverviewResponse(**snapshot)
    except Exception as exc:
        logger.error(f"Error getting market overview: {str(exc)}")
        raise HTTPException(status_code=500, detail="Internal server error")
    return response
@router.post("/dashboards", response_model=DashboardResponse)
async def create_dashboard(
    owner_id: str,
    session: SessionDep,
    dashboard_type: str = Query(default="default", description="Dashboard type: default, executive"),
    name: Optional[str] = Query(default=None, description="Custom dashboard name"),
) -> DashboardResponse:
    """Create analytics dashboard.

    Fix: ``session`` (no default) moved before the defaulted query
    parameters — the original ordering is a Python SyntaxError.

    NOTE(review): ``name`` is accepted but never forwarded to the service;
    confirm whether create_dashboard should honor it.
    """
    analytics_service = MarketplaceAnalytics(session)
    try:
        result = await analytics_service.create_dashboard(owner_id, dashboard_type)
        # Get the created dashboard details
        dashboard = session.exec(
            select(DashboardConfig).where(DashboardConfig.dashboard_id == result["dashboard_id"])
        ).first()
        if not dashboard:
            raise HTTPException(status_code=404, detail="Dashboard not found after creation")
        return DashboardResponse(
            dashboard_id=dashboard.dashboard_id,
            name=dashboard.name,
            description=dashboard.description,
            dashboard_type=dashboard.dashboard_type,
            layout=dashboard.layout,
            widgets=dashboard.widgets,
            filters=dashboard.filters,
            refresh_interval=dashboard.refresh_interval,
            auto_refresh=dashboard.auto_refresh,
            owner_id=dashboard.owner_id,
            status=dashboard.status,
            created_at=dashboard.created_at.isoformat(),
            updated_at=dashboard.updated_at.isoformat()
        )
    except Exception as e:
        logger.error(f"Error creating dashboard: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/dashboards/{dashboard_id}", response_model=DashboardResponse)
async def get_dashboard(
    dashboard_id: str,
    session: SessionDep
) -> DashboardResponse:
    """Fetch a single dashboard configuration by its identifier.

    Raises 404 when no dashboard matches, 500 on unexpected failures.
    """
    try:
        record = session.exec(
            select(DashboardConfig).where(DashboardConfig.dashboard_id == dashboard_id)
        ).first()
        if record is None:
            raise HTTPException(status_code=404, detail="Dashboard not found")
        return DashboardResponse(
            dashboard_id=record.dashboard_id,
            name=record.name,
            description=record.description,
            dashboard_type=record.dashboard_type,
            layout=record.layout,
            widgets=record.widgets,
            filters=record.filters,
            refresh_interval=record.refresh_interval,
            auto_refresh=record.auto_refresh,
            owner_id=record.owner_id,
            status=record.status,
            created_at=record.created_at.isoformat(),
            updated_at=record.updated_at.isoformat(),
        )
    except HTTPException:
        raise
    except Exception as exc:
        logger.error(f"Error getting dashboard {dashboard_id}: {str(exc)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/dashboards")
async def list_dashboards(
    session: SessionDep,
    owner_id: Optional[str] = Query(default=None, description="Filter by owner ID"),
    dashboard_type: Optional[str] = Query(default=None, description="Filter by dashboard type"),
    status: Optional[str] = Query(default=None, description="Filter by status"),
    limit: int = Query(default=50, ge=1, le=100, description="Number of results"),
) -> List[DashboardResponse]:
    """List analytics dashboards with filters.

    Fix: ``session`` (no default) moved before the defaulted query
    parameters — the original ordering is a Python SyntaxError.
    Results are ordered by ``created_at`` descending.
    """
    try:
        query = select(DashboardConfig)
        if owner_id:
            query = query.where(DashboardConfig.owner_id == owner_id)
        if dashboard_type:
            query = query.where(DashboardConfig.dashboard_type == dashboard_type)
        if status:
            query = query.where(DashboardConfig.status == status)
        dashboards = session.exec(
            query.order_by(DashboardConfig.created_at.desc()).limit(limit)
        ).all()
        return [
            DashboardResponse(
                dashboard_id=dashboard.dashboard_id,
                name=dashboard.name,
                description=dashboard.description,
                dashboard_type=dashboard.dashboard_type,
                layout=dashboard.layout,
                widgets=dashboard.widgets,
                filters=dashboard.filters,
                refresh_interval=dashboard.refresh_interval,
                auto_refresh=dashboard.auto_refresh,
                owner_id=dashboard.owner_id,
                status=dashboard.status,
                created_at=dashboard.created_at.isoformat(),
                updated_at=dashboard.updated_at.isoformat()
            )
            for dashboard in dashboards
        ]
    except Exception as e:
        logger.error(f"Error listing dashboards: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/reports", response_model=Dict[str, Any])
async def generate_report(
    report_request: ReportRequest,
    session: SessionDep
) -> Dict[str, Any]:
    """Generate an analytics report, persist it, and return a summary.

    Fix: the content generators (``generate_market_overview_report`` etc.)
    are module-level coroutines, not methods — the original invoked them
    via ``self.``, which raised NameError at runtime.
    """
    try:
        # Parse dates (ISO-8601 strings from the request body)
        start_date = datetime.fromisoformat(report_request.start_date)
        end_date = datetime.fromisoformat(report_request.end_date)
        # Create report record
        report = AnalyticsReport(
            report_id=f"report_{uuid4().hex[:8]}",
            report_type=report_request.report_type,
            title=f"{report_request.report_type.value.title()} Report",
            description=f"Analytics report for {report_request.period_type.value} period",
            period_type=report_request.period_type,
            start_date=start_date,
            end_date=end_date,
            filters=report_request.filters,
            generated_by="api",
            status="generated"
        )
        session.add(report)
        session.commit()
        session.refresh(report)
        # Generate report content based on type (module-level helpers)
        if report_request.report_type == ReportType.MARKET_OVERVIEW:
            content = await generate_market_overview_report(
                session, report_request.period_type, start_date, end_date, report_request.filters
            )
        elif report_request.report_type == ReportType.AGENT_PERFORMANCE:
            content = await generate_agent_performance_report(
                session, report_request.period_type, start_date, end_date, report_request.filters
            )
        elif report_request.report_type == ReportType.ECONOMIC_ANALYSIS:
            content = await generate_economic_analysis_report(
                session, report_request.period_type, start_date, end_date, report_request.filters
            )
        else:
            content = {"error": "Report type not implemented yet"}
        # Update report with content
        report.summary = content.get("summary", "")
        report.key_findings = content.get("key_findings", [])
        report.recommendations = content.get("recommendations", [])
        report.data_sections = content.get("data_sections", [])
        report.charts = content.get("charts", []) if report_request.include_charts else []
        report.tables = content.get("tables", [])
        session.commit()
        return {
            "report_id": report.report_id,
            "report_type": report.report_type.value,
            "title": report.title,
            "period": f"{report_request.period_type.value} from {report_request.start_date} to {report_request.end_date}",
            "summary": report.summary,
            "key_findings": report.key_findings,
            "recommendations": report.recommendations,
            "generated_at": report.generated_at.isoformat(),
            "format": report_request.format
        }
    except Exception as e:
        logger.error(f"Error generating report: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/reports/{report_id}")
async def get_report(
    report_id: str,
    session: SessionDep,
    format: str = Query(default="json", description="Response format: json, csv, pdf"),
) -> Dict[str, Any]:
    """Get generated analytics report.

    Fixes: ``session`` (no default) moved before the defaulted ``format``
    query parameter (original ordering is a SyntaxError), and
    ``convert_to_csv`` is a module-level helper — the original ``self.``
    call raised NameError.
    """
    try:
        report = session.exec(
            select(AnalyticsReport).where(AnalyticsReport.report_id == report_id)
        ).first()
        if not report:
            raise HTTPException(status_code=404, detail="Report not found")
        response_data = {
            "report_id": report.report_id,
            "report_type": report.report_type.value,
            "title": report.title,
            "description": report.description,
            "period_type": report.period_type.value,
            "start_date": report.start_date.isoformat(),
            "end_date": report.end_date.isoformat(),
            "summary": report.summary,
            "key_findings": report.key_findings,
            "recommendations": report.recommendations,
            "data_sections": report.data_sections,
            "charts": report.charts,
            "tables": report.tables,
            "generated_at": report.generated_at.isoformat(),
            "status": report.status
        }
        # Format response based on requested format
        if format == "json":
            return response_data
        elif format == "csv":
            # Convert to CSV format (simplified)
            return {"csv_data": convert_to_csv(response_data)}
        elif format == "pdf":
            # Convert to PDF format (simplified)
            return {"pdf_url": f"/api/v1/analytics/reports/{report_id}/pdf"}
        else:
            return response_data
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting report {report_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/alerts")
async def get_analytics_alerts(
    session: SessionDep,
    severity: Optional[str] = Query(default=None, description="Filter by severity level"),
    status: Optional[str] = Query(default="active", description="Filter by status"),
    limit: int = Query(default=20, ge=1, le=100, description="Number of results"),
) -> List[Dict[str, Any]]:
    """Get analytics alerts.

    Fix: ``session`` (no default) moved before the defaulted query
    parameters — the original ordering is a Python SyntaxError.
    Defaults to active alerts, newest first.
    """
    try:
        from ..domain.analytics import AnalyticsAlert
        query = select(AnalyticsAlert)
        if severity:
            query = query.where(AnalyticsAlert.severity == severity)
        if status:
            query = query.where(AnalyticsAlert.status == status)
        alerts = session.exec(
            query.order_by(AnalyticsAlert.created_at.desc()).limit(limit)
        ).all()
        return [
            {
                "alert_id": alert.alert_id,
                "rule_id": alert.rule_id,
                "alert_type": alert.alert_type,
                "title": alert.title,
                "message": alert.message,
                "severity": alert.severity,
                "confidence": alert.confidence,
                "trigger_value": alert.trigger_value,
                "threshold_value": alert.threshold_value,
                "affected_metrics": alert.affected_metrics,
                "status": alert.status,
                "created_at": alert.created_at.isoformat(),
                "expires_at": alert.expires_at.isoformat() if alert.expires_at else None
            }
            for alert in alerts
        ]
    except Exception as e:
        logger.error(f"Error getting analytics alerts: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/kpi")
async def get_key_performance_indicators(
    session: SessionDep,
    period_type: AnalyticsPeriod = Query(default=AnalyticsPeriod.DAILY, description="Period type"),
) -> Dict[str, Any]:
    """Get key performance indicators.

    Fixes: ``session`` (no default) moved before the defaulted query
    parameter (original ordering is a SyntaxError), and
    ``get_kpi_status``/``calculate_overall_health`` are module-level
    helpers — the original ``self.`` calls raised NameError.
    """
    try:
        # Derive the lookback window from the requested period type
        end_time = datetime.utcnow()
        if period_type == AnalyticsPeriod.DAILY:
            start_time = end_time - timedelta(days=1)
        elif period_type == AnalyticsPeriod.WEEKLY:
            start_time = end_time - timedelta(weeks=1)
        elif period_type == AnalyticsPeriod.MONTHLY:
            start_time = end_time - timedelta(days=30)
        else:
            start_time = end_time - timedelta(hours=1)
        metrics = session.exec(
            select(MarketMetric).where(
                and_(
                    MarketMetric.period_type == period_type,
                    MarketMetric.period_start >= start_time,
                    MarketMetric.period_end <= end_time
                )
            ).order_by(MarketMetric.recorded_at.desc())
        ).all()
        # Calculate KPIs for a fixed whitelist of headline metrics
        kpis = {}
        for metric in metrics:
            if metric.metric_name in ["transaction_volume", "active_agents", "average_price", "success_rate"]:
                kpis[metric.metric_name] = {
                    "value": metric.value,
                    "unit": metric.unit,
                    "change_percentage": metric.change_percentage,
                    "trend": "up" if metric.change_percentage and metric.change_percentage > 0 else "down",
                    "status": get_kpi_status(metric.metric_name, metric.value, metric.change_percentage)
                }
        return {
            "period_type": period_type.value,
            "start_time": start_time.isoformat(),
            "end_time": end_time.isoformat(),
            "kpis": kpis,
            "overall_health": calculate_overall_health(kpis)
        }
    except Exception as e:
        logger.error(f"Error getting KPIs: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
# Helper methods
async def generate_market_overview_report(
    session: Session,
    period_type: AnalyticsPeriod,
    start_date: datetime,
    end_date: datetime,
    filters: Dict[str, Any]
) -> Dict[str, Any]:
    """Generate market overview report content.

    Queries metrics and insights recorded within [start_date, end_date]
    and assembles the report payload (summary, key_findings,
    recommendations, data_sections, charts) consumed by generate_report.

    NOTE(review): ``filters`` is accepted but never applied to either
    query — confirm whether it should narrow the results.
    """
    # Get metrics for the period
    metrics = session.exec(
        select(MarketMetric).where(
            and_(
                MarketMetric.period_type == period_type,
                MarketMetric.period_start >= start_date,
                MarketMetric.period_end <= end_date
            )
        ).order_by(MarketMetric.recorded_at.desc())
    ).all()
    # Get insights for the period
    insights = session.exec(
        select(MarketInsight).where(
            and_(
                MarketInsight.created_at >= start_date,
                MarketInsight.created_at <= end_date
            )
        ).order_by(MarketInsight.created_at.desc())
    ).all()
    # Headline figures default to 0 when the metric is absent in the window
    return {
        "summary": f"Market overview for {period_type.value} period from {start_date.date()} to {end_date.date()}",
        "key_findings": [
            f"Total transaction volume: {next((m.value for m in metrics if m.metric_name == 'transaction_volume'), 0):.2f} AITBC",
            f"Active agents: {next((int(m.value) for m in metrics if m.metric_name == 'active_agents'), 0)}",
            f"Average success rate: {next((m.value for m in metrics if m.metric_name == 'success_rate'), 0):.1f}%",
            f"Total insights generated: {len(insights)}"
        ],
        "recommendations": [
            "Monitor transaction volume trends for growth opportunities",
            "Focus on improving agent success rates",
            "Analyze geographic distribution for market expansion"
        ],
        "data_sections": [
            {
                "title": "Transaction Metrics",
                "data": {
                    metric.metric_name: metric.value
                    for metric in metrics
                    if metric.category == "financial"
                }
            },
            {
                "title": "Agent Metrics",
                "data": {
                    metric.metric_name: metric.value
                    for metric in metrics
                    if metric.category == "agents"
                }
            }
        ],
        "charts": [
            {
                "type": "line",
                "title": "Transaction Volume Trend",
                "data": [m.value for m in metrics if m.metric_name == "transaction_volume"]
            },
            {
                "type": "pie",
                "title": "Agent Distribution by Tier",
                "data": next((m.breakdown.get("by_tier", {}) for m in metrics if m.metric_name == "active_agents"), {})
            }
        ]
    }
async def generate_agent_performance_report(
    session: Session,
    period_type: AnalyticsPeriod,
    start_date: datetime,
    end_date: datetime,
    filters: Dict[str, Any]
) -> Dict[str, Any]:
    """Build the agent-performance report payload.

    Mock implementation — would query actual agent performance data;
    only ``period_type`` currently influences the output.
    """
    findings = [
        "Top performing agents show 20% higher success rates",
        "Agent retention rate improved by 5%",
        "Average agent earnings increased by 10%"
    ]
    advice = [
        "Provide additional training for lower-performing agents",
        "Implement recognition programs for top performers",
        "Optimize agent matching algorithms"
    ]
    sections = [
        {
            "title": "Performance Metrics",
            "data": {
                "top_performers": 25,
                "average_success_rate": 87.5,
                "retention_rate": 92.0
            }
        }
    ]
    return {
        "summary": f"Agent performance report for {period_type.value} period",
        "key_findings": findings,
        "recommendations": advice,
        "data_sections": sections
    }
async def generate_economic_analysis_report(
    session: Session,
    period_type: AnalyticsPeriod,
    start_date: datetime,
    end_date: datetime,
    filters: Dict[str, Any]
) -> Dict[str, Any]:
    """Build the economic-analysis report payload.

    Mock implementation — would query actual economic data;
    only ``period_type`` currently influences the output.
    """
    findings = [
        "Market showed 15% growth in transaction volume",
        "Price stability maintained across all regions",
        "Supply/demand balance improved by 10%"
    ]
    advice = [
        "Continue current pricing strategies",
        "Focus on market expansion in high-growth regions",
        "Monitor supply/demand ratios for optimization"
    ]
    sections = [
        {
            "title": "Economic Indicators",
            "data": {
                "market_growth": 15.0,
                "price_stability": 95.0,
                "supply_demand_balance": 1.1
            }
        }
    ]
    return {
        "summary": f"Economic analysis for {period_type.value} period",
        "key_findings": findings,
        "recommendations": advice,
        "data_sections": sections
    }
def get_kpi_status(metric_name: str, value: float, change_percentage: Optional[float]) -> str:
    """Map a KPI's value/trend to a qualitative status label.

    ``success_rate`` is graded on its absolute value; ``transaction_volume``
    on its period-over-period change; anything else defaults to "good".
    """
    if metric_name == "success_rate":
        for threshold, label in ((90, "excellent"), (80, "good"), (70, "fair")):
            if value >= threshold:
                return label
        return "poor"
    if metric_name == "transaction_volume":
        # A missing or zero change percentage falls through to "fair".
        if change_percentage and change_percentage > 10:
            return "excellent"
        if change_percentage and change_percentage > 0:
            return "good"
        if change_percentage and change_percentage < -10:
            return "poor"
        return "fair"
    return "good"
def calculate_overall_health(kpis: Dict[str, Any]) -> str:
    """Aggregate per-KPI status labels into one overall health label.

    Thresholds: >=60% excellent -> "excellent"; >=70% excellent+good ->
    "good"; >=30% poor -> "poor"; otherwise "fair". Empty input -> "unknown".
    """
    if not kpis:
        return "unknown"
    # Tally KPIs by their status label (missing status counts as "fair")
    tally: Dict[str, int] = {}
    for entry in kpis.values():
        label = entry.get("status", "fair")
        tally[label] = tally.get(label, 0) + 1
    total = len(kpis)
    excellent = tally.get("excellent", 0)
    if excellent >= total * 0.6:
        return "excellent"
    if excellent + tally.get("good", 0) >= total * 0.7:
        return "good"
    if tally.get("poor", 0) >= total * 0.3:
        return "poor"
    return "fair"
def convert_to_csv(data: Dict[str, Any]) -> str:
    """Render the KPI section of a report as a small CSV string.

    Produces a fixed header row plus one row per entry under ``data["kpis"]``;
    only KPI data is exported (other report sections are ignored).
    """
    rows = ["Metric,Value,Unit,Change,Trend,Status"]
    for name, kpi in data.get("kpis", {}).items():
        rows.append(
            f"{name},{kpi.get('value', '')},{kpi.get('unit', '')},"
            f"{kpi.get('change_percentage', '')}%,{kpi.get('trend', '')},"
            f"{kpi.get('status', '')}"
        )
    return "\n".join(rows)

View File

@@ -0,0 +1,843 @@
"""
Certification and Partnership API Endpoints
REST API for agent certification, partnership programs, and badge system
"""
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query
from pydantic import BaseModel, Field
import logging
from ..storage import SessionDep
from ..services.certification_service import (
CertificationAndPartnershipService, CertificationSystem, PartnershipManager, BadgeSystem
)
from ..domain.certification import (
AgentCertification, CertificationRequirement, VerificationRecord,
PartnershipProgram, AgentPartnership, AchievementBadge, AgentBadge,
CertificationLevel, CertificationStatus, VerificationType,
PartnershipType, BadgeType
)
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/v1/certification", tags=["certification"])
# Pydantic models for API requests/responses
class CertificationRequest(BaseModel):
    """Request model for agent certification.

    Consumed by POST /v1/certification/certify.
    """
    agent_id: str  # agent to certify
    level: CertificationLevel
    certification_type: str = Field(default="standard", description="Certification type")
    issued_by: str = Field(description="Who is issuing the certification")
class CertificationResponse(BaseModel):
    """Response model for agent certification.

    Serialized view of an ``AgentCertification`` row; timestamps are
    ISO-8601 strings, enums are emitted as their ``.value``.
    """
    certification_id: str
    agent_id: str
    certification_level: str  # CertificationLevel.value
    certification_type: str
    status: str  # CertificationStatus.value
    issued_by: str
    issued_at: str  # ISO-8601 timestamp
    expires_at: Optional[str]  # ISO-8601 timestamp or None for non-expiring
    verification_hash: str  # integrity hash for the certification record
    requirements_met: List[str]
    granted_privileges: List[str]
    access_levels: List[str]
class PartnershipApplicationRequest(BaseModel):
    """Request model for partnership application.

    Consumed by POST /v1/certification/partnerships/apply.
    """
    agent_id: str  # applying agent
    program_id: str  # target PartnershipProgram
    application_data: Dict[str, Any] = Field(default_factory=dict, description="Application data")
class PartnershipResponse(BaseModel):
    """Response model for partnership.

    Serialized view of an ``AgentPartnership`` row; timestamps are
    ISO-8601 strings, enums are emitted as their ``.value``.
    """
    partnership_id: str
    agent_id: str
    program_id: str
    partnership_type: str  # PartnershipType.value
    current_tier: str  # tier within the program (e.g. "basic", "premium")
    status: str
    applied_at: str  # ISO-8601 timestamp
    approved_at: Optional[str]  # ISO-8601 timestamp, None while pending
    performance_score: float
    total_earnings: float
    earned_benefits: List[str]
class BadgeCreationRequest(BaseModel):
    """Request model for badge creation.

    Consumed by POST /v1/certification/badges.
    """
    badge_name: str
    badge_type: BadgeType
    description: str
    criteria: Dict[str, Any] = Field(description="Badge criteria and thresholds")
    created_by: str  # creator identity recorded on the badge
class BadgeAwardRequest(BaseModel):
    """Request model for badge award.

    Consumed by POST /v1/certification/badges/award.
    """
    agent_id: str  # recipient agent
    badge_id: str  # existing AchievementBadge to award
    awarded_by: str
    award_reason: str = Field(default="", description="Reason for awarding badge")
    context: Dict[str, Any] = Field(default_factory=dict, description="Award context")
class BadgeResponse(BaseModel):
    """Response model for badge.

    Combines fields from ``AchievementBadge`` (definition) and
    ``AgentBadge`` (the award: awarded_at, is_featured).
    """
    badge_id: str
    badge_name: str
    badge_type: str  # BadgeType.value
    description: str
    rarity: str
    point_value: int
    category: str
    awarded_at: str  # ISO-8601 timestamp of the award
    is_featured: bool  # whether the agent features this badge
    badge_icon: str
class AgentCertificationSummary(BaseModel):
    """Response model for agent certification summary.

    Aggregated view across all four subsystems for one agent.
    """
    agent_id: str
    certifications: Dict[str, Any]
    partnerships: Dict[str, Any]
    badges: Dict[str, Any]
    verifications: Dict[str, Any]
# API Endpoints
@router.post("/certify", response_model=CertificationResponse)
async def certify_agent(
    certification_request: CertificationRequest,
    session: SessionDep
) -> CertificationResponse:
    """Certify an agent at a specific level.

    Delegates to the certification system, which returns
    (success, certification, errors); a failed certification becomes a
    400 with the joined error messages, unexpected failures a 500.
    """
    certification_service = CertificationAndPartnershipService(session)
    try:
        success, certification, errors = await certification_service.certification_system.certify_agent(
            session=session,
            agent_id=certification_request.agent_id,
            level=certification_request.level,
            issued_by=certification_request.issued_by,
            certification_type=certification_request.certification_type
        )
        if not success:
            raise HTTPException(status_code=400, detail=f"Certification failed: {'; '.join(errors)}")
        return CertificationResponse(
            certification_id=certification.certification_id,
            agent_id=certification.agent_id,
            certification_level=certification.certification_level.value,
            certification_type=certification.certification_type,
            status=certification.status.value,
            issued_by=certification.issued_by,
            issued_at=certification.issued_at.isoformat(),
            expires_at=certification.expires_at.isoformat() if certification.expires_at else None,
            verification_hash=certification.verification_hash,
            requirements_met=certification.requirements_met,
            granted_privileges=certification.granted_privileges,
            access_levels=certification.access_levels
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error certifying agent: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/certifications/{certification_id}/renew")
async def renew_certification(
    certification_id: str,
    renewed_by: str,
    session: SessionDep
) -> Dict[str, Any]:
    """Renew an existing certification.

    Returns a 400 with the system's message when renewal is rejected,
    500 on unexpected failures.
    """
    service = CertificationAndPartnershipService(session)
    try:
        ok, message = await service.certification_system.renew_certification(
            session=session,
            certification_id=certification_id,
            renewed_by=renewed_by
        )
        if not ok:
            raise HTTPException(status_code=400, detail=message)
        return {
            "success": True,
            "message": message,
            "certification_id": certification_id
        }
    except HTTPException:
        raise
    except Exception as exc:
        logger.error(f"Error renewing certification: {str(exc)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/certifications/{agent_id}")
async def get_agent_certifications(
    agent_id: str,
    session: SessionDep,
    status: Optional[str] = Query(default=None, description="Filter by status"),
) -> List[CertificationResponse]:
    """Get certifications for an agent.

    Fix: ``session`` (no default) moved before the defaulted ``status``
    query parameter — the original ordering is a Python SyntaxError.
    Results are ordered by ``issued_at`` descending.
    """
    try:
        query = select(AgentCertification).where(AgentCertification.agent_id == agent_id)
        if status:
            query = query.where(AgentCertification.status == CertificationStatus(status))
        certifications = session.exec(
            query.order_by(AgentCertification.issued_at.desc())
        ).all()
        return [
            CertificationResponse(
                certification_id=cert.certification_id,
                agent_id=cert.agent_id,
                certification_level=cert.certification_level.value,
                certification_type=cert.certification_type,
                status=cert.status.value,
                issued_by=cert.issued_by,
                issued_at=cert.issued_at.isoformat(),
                expires_at=cert.expires_at.isoformat() if cert.expires_at else None,
                verification_hash=cert.verification_hash,
                requirements_met=cert.requirements_met,
                granted_privileges=cert.granted_privileges,
                access_levels=cert.access_levels
            )
            for cert in certifications
        ]
    except Exception as e:
        logger.error(f"Error getting certifications for agent {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/partnerships/programs")
async def create_partnership_program(
    program_name: str,
    program_type: PartnershipType,
    description: str,
    created_by: str,
    session: SessionDep,
    tier_levels: List[str] = Query(default=["basic", "premium"]),
    max_participants: Optional[int] = Query(default=None, description="Maximum participants"),
    launch_immediately: bool = Query(default=False, description="Launch program immediately"),
) -> Dict[str, Any]:
    """Create a new partnership program.

    Fixes: ``session`` (no default) moved before the defaulted parameters
    (original ordering is a SyntaxError), and pydantic ``Field(...)``
    replaced with FastAPI ``Query(...)`` — ``Field`` is for model
    attributes, not path-operation parameters.
    """
    partnership_manager = PartnershipManager()
    try:
        program = await partnership_manager.create_partnership_program(
            session=session,
            program_name=program_name,
            program_type=program_type,
            description=description,
            created_by=created_by,
            tier_levels=tier_levels,
            max_participants=max_participants,
            launch_immediately=launch_immediately
        )
        return {
            "program_id": program.program_id,
            "program_name": program.program_name,
            "program_type": program.program_type.value,
            "status": program.status,
            "tier_levels": program.tier_levels,
            "max_participants": program.max_participants,
            "current_participants": program.current_participants,
            "created_at": program.created_at.isoformat(),
            "launched_at": program.launched_at.isoformat() if program.launched_at else None
        }
    except Exception as e:
        logger.error(f"Error creating partnership program: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/partnerships/apply", response_model=PartnershipResponse)
async def apply_for_partnership(
    application: PartnershipApplicationRequest,
    session: SessionDep
) -> PartnershipResponse:
    """Apply for a partnership program.

    Delegates to the partnership manager, which returns
    (success, partnership, errors); a rejected application becomes a 400
    with the joined error messages, unexpected failures a 500.
    """
    partnership_manager = PartnershipManager()
    try:
        success, partnership, errors = await partnership_manager.apply_for_partnership(
            session=session,
            agent_id=application.agent_id,
            program_id=application.program_id,
            application_data=application.application_data
        )
        if not success:
            raise HTTPException(status_code=400, detail=f"Application failed: {'; '.join(errors)}")
        return PartnershipResponse(
            partnership_id=partnership.partnership_id,
            agent_id=partnership.agent_id,
            program_id=partnership.program_id,
            partnership_type=partnership.partnership_type.value,
            current_tier=partnership.current_tier,
            status=partnership.status,
            applied_at=partnership.applied_at.isoformat(),
            approved_at=partnership.approved_at.isoformat() if partnership.approved_at else None,
            performance_score=partnership.performance_score,
            total_earnings=partnership.total_earnings,
            earned_benefits=partnership.earned_benefits
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error applying for partnership: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/partnerships/{agent_id}")
async def get_agent_partnerships(
    agent_id: str,
    session: SessionDep,
    status: Optional[str] = Query(default=None, description="Filter by status"),
    partnership_type: Optional[str] = Query(default=None, description="Filter by partnership type"),
) -> List[PartnershipResponse]:
    """Get partnerships for an agent.

    Fix: ``session`` (no default) moved before the defaulted query
    parameters — the original ordering is a Python SyntaxError.
    Results are ordered by ``applied_at`` descending.
    """
    try:
        query = select(AgentPartnership).where(AgentPartnership.agent_id == agent_id)
        if status:
            query = query.where(AgentPartnership.status == status)
        if partnership_type:
            query = query.where(AgentPartnership.partnership_type == PartnershipType(partnership_type))
        partnerships = session.exec(
            query.order_by(AgentPartnership.applied_at.desc())
        ).all()
        return [
            PartnershipResponse(
                partnership_id=partner.partnership_id,
                agent_id=partner.agent_id,
                program_id=partner.program_id,
                partnership_type=partner.partnership_type.value,
                current_tier=partner.current_tier,
                status=partner.status,
                applied_at=partner.applied_at.isoformat(),
                approved_at=partner.approved_at.isoformat() if partner.approved_at else None,
                performance_score=partner.performance_score,
                total_earnings=partner.total_earnings,
                earned_benefits=partner.earned_benefits
            )
            for partner in partnerships
        ]
    except Exception as e:
        logger.error(f"Error getting partnerships for agent {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/partnerships/programs")
async def list_partnership_programs(
    session: SessionDep,
    partnership_type: Optional[str] = Query(default=None, description="Filter by partnership type"),
    status: Optional[str] = Query(default="active", description="Filter by status"),
    limit: int = Query(default=50, ge=1, le=100, description="Number of results"),
) -> List[Dict[str, Any]]:
    """List available partnership programs.

    Fix: ``session`` (no default) moved before the defaulted query
    parameters — the original ordering is a Python SyntaxError.

    NOTE(review): the earlier route "/partnerships/{agent_id}" is
    registered before this one, so GET /partnerships/programs matches
    that path parameter route first — this route is likely shadowed.
    Register this route before the parameterized one to fix.
    """
    try:
        query = select(PartnershipProgram)
        if partnership_type:
            query = query.where(PartnershipProgram.program_type == PartnershipType(partnership_type))
        if status:
            query = query.where(PartnershipProgram.status == status)
        programs = session.exec(
            query.order_by(PartnershipProgram.created_at.desc()).limit(limit)
        ).all()
        return [
            {
                "program_id": program.program_id,
                "program_name": program.program_name,
                "program_type": program.program_type.value,
                "description": program.description,
                "status": program.status,
                "tier_levels": program.tier_levels,
                "max_participants": program.max_participants,
                "current_participants": program.current_participants,
                "created_at": program.created_at.isoformat(),
                "launched_at": program.launched_at.isoformat() if program.launched_at else None,
                "expires_at": program.expires_at.isoformat() if program.expires_at else None
            }
            for program in programs
        ]
    except Exception as e:
        logger.error(f"Error listing partnership programs: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/badges")
async def create_badge(
    badge_request: BadgeCreationRequest,
    session: SessionDep
) -> Dict[str, Any]:
    """Create a new achievement badge definition."""
    badges = BadgeSystem()
    try:
        created = await badges.create_badge(
            session=session,
            badge_name=badge_request.badge_name,
            badge_type=badge_request.badge_type,
            description=badge_request.description,
            criteria=badge_request.criteria,
            created_by=badge_request.created_by
        )
        until = created.available_until
        return {
            "badge_id": created.badge_id,
            "badge_name": created.badge_name,
            "badge_type": created.badge_type.value,
            "description": created.description,
            "rarity": created.rarity,
            "point_value": created.point_value,
            "category": created.category,
            "is_active": created.is_active,
            "created_at": created.created_at.isoformat(),
            "available_from": created.available_from.isoformat(),
            "available_until": until.isoformat() if until else None
        }
    except Exception as exc:
        logger.error(f"Error creating badge: {str(exc)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/badges/award", response_model=BadgeResponse)
async def award_badge(
    badge_request: BadgeAwardRequest,
    session: SessionDep
) -> BadgeResponse:
    """Award a badge to an agent; 400 if the badge system rejects the award."""
    system = BadgeSystem()
    try:
        ok, agent_badge, message = await system.award_badge(
            session=session,
            agent_id=badge_request.agent_id,
            badge_id=badge_request.badge_id,
            awarded_by=badge_request.awarded_by,
            award_reason=badge_request.award_reason,
            context=badge_request.context
        )
        if not ok:
            raise HTTPException(status_code=400, detail=message)
        # Fetch the badge definition referenced by the new award for the payload.
        definition = session.exec(
            select(AchievementBadge).where(AchievementBadge.badge_id == badge_request.badge_id)
        ).first()
        return BadgeResponse(
            badge_id=definition.badge_id,
            badge_name=definition.badge_name,
            badge_type=definition.badge_type.value,
            description=definition.description,
            rarity=definition.rarity,
            point_value=definition.point_value,
            category=definition.category,
            awarded_at=agent_badge.awarded_at.isoformat(),
            is_featured=agent_badge.is_featured,
            badge_icon=definition.badge_icon
        )
    except HTTPException:
        # Preserve deliberate 4xx responses raised above.
        raise
    except Exception as e:
        logger.error(f"Error awarding badge: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/badges/{agent_id}")
async def get_agent_badges(
    agent_id: str,
    session: SessionDep,
    badge_type: Optional[str] = Query(default=None, description="Filter by badge type"),
    category: Optional[str] = Query(default=None, description="Filter by category"),
    featured_only: bool = Query(default=False, description="Only featured badges"),
    limit: int = Query(default=50, ge=1, le=100, description="Number of results"),
) -> List[BadgeResponse]:
    """Get badges awarded to an agent.

    Fix: `session` (no default) previously followed defaulted Query params,
    which is a SyntaxError; the dependency now precedes them. FastAPI
    injection is order-agnostic, so the HTTP interface is unchanged.

    NOTE(review): supplying both `badge_type` and `category` joins
    AchievementBadge twice — confirm the ORM tolerates the duplicate join.
    """
    try:
        query = select(AgentBadge).where(AgentBadge.agent_id == agent_id)
        if badge_type:
            query = query.join(AchievementBadge).where(AchievementBadge.badge_type == BadgeType(badge_type))
        if category:
            query = query.join(AchievementBadge).where(AchievementBadge.category == category)
        if featured_only:
            query = query.where(AgentBadge.is_featured == True)
        agent_badges = session.exec(
            query.order_by(AgentBadge.awarded_at.desc()).limit(limit)
        ).all()
        # Bulk-fetch the badge definitions referenced by the awards.
        badge_ids = [ab.badge_id for ab in agent_badges]
        badges = session.exec(
            select(AchievementBadge).where(AchievementBadge.badge_id.in_(badge_ids))
        ).all()
        badge_map = {badge.badge_id: badge for badge in badges}
        # Awards whose definition is missing are silently dropped.
        return [
            BadgeResponse(
                badge_id=ab.badge_id,
                badge_name=badge_map[ab.badge_id].badge_name,
                badge_type=badge_map[ab.badge_id].badge_type.value,
                description=badge_map[ab.badge_id].description,
                rarity=badge_map[ab.badge_id].rarity,
                point_value=badge_map[ab.badge_id].point_value,
                category=badge_map[ab.badge_id].category,
                awarded_at=ab.awarded_at.isoformat(),
                is_featured=ab.is_featured,
                badge_icon=badge_map[ab.badge_id].badge_icon
            )
            for ab in agent_badges if ab.badge_id in badge_map
        ]
    except Exception as e:
        logger.error(f"Error getting badges for agent {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/badges")
async def list_available_badges(
    session: SessionDep,
    badge_type: Optional[str] = Query(default=None, description="Filter by badge type"),
    category: Optional[str] = Query(default=None, description="Filter by category"),
    rarity: Optional[str] = Query(default=None, description="Filter by rarity"),
    active_only: bool = Query(default=True, description="Only active badges"),
    limit: int = Query(default=50, ge=1, le=100, description="Number of results"),
) -> List[Dict[str, Any]]:
    """List available badges.

    Fix: `session` (no default) previously followed defaulted Query params,
    which is a SyntaxError; the dependency now precedes them. FastAPI
    injection is order-agnostic, so the HTTP interface is unchanged.
    """
    try:
        query = select(AchievementBadge)
        if badge_type:
            query = query.where(AchievementBadge.badge_type == BadgeType(badge_type))
        if category:
            query = query.where(AchievementBadge.category == category)
        if rarity:
            query = query.where(AchievementBadge.rarity == rarity)
        if active_only:
            query = query.where(AchievementBadge.is_active == True)
        badges = session.exec(
            query.order_by(AchievementBadge.created_at.desc()).limit(limit)
        ).all()
        return [
            {
                "badge_id": badge.badge_id,
                "badge_name": badge.badge_name,
                "badge_type": badge.badge_type.value,
                "description": badge.description,
                "rarity": badge.rarity,
                "point_value": badge.point_value,
                "category": badge.category,
                "is_active": badge.is_active,
                "is_limited": badge.is_limited,
                "max_awards": badge.max_awards,
                "current_awards": badge.current_awards,
                "created_at": badge.created_at.isoformat(),
                "available_from": badge.available_from.isoformat(),
                "available_until": badge.available_until.isoformat() if badge.available_until else None
            }
            for badge in badges
        ]
    except Exception as e:
        logger.error(f"Error listing available badges: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/badges/{agent_id}/check-automatic")
async def check_automatic_badges(
    agent_id: str,
    session: SessionDep
) -> Dict[str, Any]:
    """Evaluate automatic badge criteria for an agent and award any earned."""
    system = BadgeSystem()
    try:
        newly_awarded = await system.check_and_award_automatic_badges(session, agent_id)
        return {
            "agent_id": agent_id,
            "badges_awarded": newly_awarded,
            "total_awarded": len(newly_awarded),
            "checked_at": datetime.utcnow().isoformat()
        }
    except Exception as e:
        logger.error(f"Error checking automatic badges for agent {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/summary/{agent_id}", response_model=AgentCertificationSummary)
async def get_agent_summary(
    agent_id: str,
    session: SessionDep
) -> AgentCertificationSummary:
    """Get comprehensive certification and partnership summary for an agent"""
    svc = CertificationAndPartnershipService(session)
    try:
        data = await svc.get_agent_certification_summary(agent_id)
        return AgentCertificationSummary(**data)
    except Exception as e:
        logger.error(f"Error getting certification summary for agent {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/verification/{agent_id}")
async def get_verification_records(
    agent_id: str,
    session: SessionDep,
    verification_type: Optional[str] = Query(default=None, description="Filter by verification type"),
    status: Optional[str] = Query(default=None, description="Filter by status"),
    limit: int = Query(default=20, ge=1, le=100, description="Number of results"),
) -> List[Dict[str, Any]]:
    """Get verification records for an agent.

    Fix: `session` (no default) previously followed defaulted Query params,
    which is a SyntaxError; the dependency now precedes them. FastAPI
    injection is order-agnostic, so the HTTP interface is unchanged.
    """
    try:
        query = select(VerificationRecord).where(VerificationRecord.agent_id == agent_id)
        if verification_type:
            query = query.where(VerificationRecord.verification_type == VerificationType(verification_type))
        if status:
            query = query.where(VerificationRecord.status == status)
        verifications = session.exec(
            query.order_by(VerificationRecord.requested_at.desc()).limit(limit)
        ).all()
        return [
            {
                "verification_id": verification.verification_id,
                "verification_type": verification.verification_type.value,
                "verification_method": verification.verification_method,
                "status": verification.status,
                "requested_by": verification.requested_by,
                "requested_at": verification.requested_at.isoformat(),
                "started_at": verification.started_at.isoformat() if verification.started_at else None,
                "completed_at": verification.completed_at.isoformat() if verification.completed_at else None,
                "result_score": verification.result_score,
                "failure_reasons": verification.failure_reasons,
                "processing_time": verification.processing_time
            }
            for verification in verifications
        ]
    except Exception as e:
        logger.error(f"Error getting verification records for agent {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/levels")
async def get_certification_levels(
    session: SessionDep
) -> List[Dict[str, Any]]:
    """Get available certification levels and requirements"""
    try:
        system = CertificationSystem()
        # Rank map replaces repeated list.index() lookups in the sort key.
        rank = {name: idx for idx, name in enumerate(
            ['basic', 'intermediate', 'advanced', 'enterprise', 'premium'])}
        levels = [
            {
                "level": level.value,
                "requirements": config['requirements'],
                "privileges": config['privileges'],
                "validity_days": config['validity_days'],
                "renewal_requirements": config['renewal_requirements']
            }
            for level, config in system.certification_levels.items()
        ]
        return sorted(levels, key=lambda entry: rank[entry['level']])
    except Exception as e:
        logger.error(f"Error getting certification levels: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/requirements")
async def get_certification_requirements(
    session: SessionDep,
    level: Optional[str] = Query(default=None, description="Filter by certification level"),
    verification_type: Optional[str] = Query(default=None, description="Filter by verification type"),
) -> List[Dict[str, Any]]:
    """Get certification requirements.

    Fix: `session` (no default) previously followed defaulted Query params,
    which is a SyntaxError; the dependency now precedes them. FastAPI
    injection is order-agnostic, so the HTTP interface is unchanged.
    """
    try:
        query = select(CertificationRequirement)
        if level:
            query = query.where(CertificationRequirement.certification_level == CertificationLevel(level))
        if verification_type:
            query = query.where(CertificationRequirement.verification_type == VerificationType(verification_type))
        requirements = session.exec(
            query.order_by(CertificationRequirement.certification_level, CertificationRequirement.requirement_name)
        ).all()
        return [
            {
                "id": requirement.id,
                "certification_level": requirement.certification_level.value,
                "verification_type": requirement.verification_type.value,
                "requirement_name": requirement.requirement_name,
                "description": requirement.description,
                "criteria": requirement.criteria,
                "minimum_threshold": requirement.minimum_threshold,
                "maximum_threshold": requirement.maximum_threshold,
                "required_values": requirement.required_values,
                "verification_method": requirement.verification_method,
                "is_mandatory": requirement.is_mandatory,
                "weight": requirement.weight,
                "is_active": requirement.is_active
            }
            for requirement in requirements
        ]
    except Exception as e:
        logger.error(f"Error getting certification requirements: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/leaderboard")
async def get_certification_leaderboard(
    session: SessionDep,
    category: str = Query(default="highest_level", description="Leaderboard category"),
    limit: int = Query(default=50, ge=1, le=100, description="Number of results"),
) -> List[Dict[str, Any]]:
    """Get certification leaderboard.

    Fix: `session` (no default) previously followed defaulted Query params,
    which is a SyntaxError; the dependency now precedes them. FastAPI
    injection is order-agnostic, so the HTTP interface is unchanged.

    The original three `category` branches built identical queries, so the
    query is now built once; `category` only selects the sort key
    ("highest_level" | "most_certifications" | anything else = total
    privileges). Behavior is unchanged.
    """
    # Canonical ordering of certification levels, lowest to highest.
    level_order = ['basic', 'intermediate', 'advanced', 'enterprise', 'premium']
    try:
        query = select(AgentCertification).where(
            AgentCertification.status == CertificationStatus.ACTIVE
        )
        certifications = session.exec(
            # Over-fetch to compensate for multiple certifications per agent.
            query.order_by(AgentCertification.issued_at.desc()).limit(limit * 2)
        ).all()
        # Aggregate per-agent statistics across their active certifications.
        agent_scores = {}
        for cert in certifications:
            entry = agent_scores.get(cert.agent_id)
            if entry is None:
                entry = agent_scores[cert.agent_id] = {
                    'agent_id': cert.agent_id,
                    'highest_level': cert.certification_level.value,
                    'certification_count': 0,
                    'total_privileges': 0,
                    'latest_certification': cert.issued_at
                }
            entry['certification_count'] += 1
            entry['total_privileges'] += len(cert.granted_privileges)
            # Keep the highest level seen so far.
            if level_order.index(cert.certification_level.value) > level_order.index(entry['highest_level']):
                entry['highest_level'] = cert.certification_level.value
            # Keep the most recent issuance timestamp.
            if cert.issued_at > entry['latest_certification']:
                entry['latest_certification'] = cert.issued_at
        # Choose the sort key for the requested category.
        if category == "highest_level":
            sort_key = lambda x: level_order.index(x['highest_level'])
        elif category == "most_certifications":
            sort_key = lambda x: x['certification_count']
        else:
            sort_key = lambda x: x['total_privileges']
        sorted_agents = sorted(agent_scores.values(), key=sort_key, reverse=True)
        return [
            {
                'rank': rank + 1,
                'agent_id': agent['agent_id'],
                'highest_level': agent['highest_level'],
                'certification_count': agent['certification_count'],
                'total_privileges': agent['total_privileges'],
                'latest_certification': agent['latest_certification'].isoformat()
            }
            for rank, agent in enumerate(sorted_agents[:limit])
        ]
    except Exception as e:
        logger.error(f"Error getting certification leaderboard: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")

View File

@@ -0,0 +1,225 @@
"""
Community and Developer Ecosystem API Endpoints
REST API for managing OpenClaw developer profiles, SDKs, solutions, and hackathons
"""
from datetime import datetime
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query, Body
from pydantic import BaseModel, Field
import logging
from ..storage import SessionDep
from ..services.community_service import (
DeveloperEcosystemService, ThirdPartySolutionService,
InnovationLabService, CommunityPlatformService
)
from ..domain.community import (
DeveloperProfile, AgentSolution, InnovationLab,
CommunityPost, Hackathon, DeveloperTier, SolutionStatus, LabStatus
)
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/community", tags=["community"])
# Models
class DeveloperProfileCreate(BaseModel):
    """Request body for registering a new developer profile."""
    user_id: str  # platform account that owns the developer identity
    username: str  # public handle shown in the community
    bio: Optional[str] = None  # optional free-form biography
    skills: List[str] = Field(default_factory=list)  # declared skill tags
class SolutionPublishRequest(BaseModel):
    """Request body for publishing a third-party agent solution."""
    developer_id: str  # publishing developer's id
    title: str
    description: str
    version: str = "1.0.0"  # presumably a semantic version string — TODO confirm
    capabilities: List[str] = Field(default_factory=list)  # advertised agent capabilities
    frameworks: List[str] = Field(default_factory=list)  # supported agent frameworks
    price_model: str = "free"  # pricing scheme; other values presumably paid — TODO confirm
    price_amount: float = 0.0  # price when not free; units not defined here
    metadata: Dict[str, Any] = Field(default_factory=dict)  # arbitrary extra attributes
class LabProposalRequest(BaseModel):
    """Request body for proposing an innovation lab / research program."""
    title: str
    description: str
    research_area: str  # research focus of the proposed lab
    funding_goal: float = 0.0  # target funding amount; 0 means none set
    milestones: List[Dict[str, Any]] = Field(default_factory=list)  # milestone descriptors; schema defined by the service
class PostCreateRequest(BaseModel):
    """Request body for creating a community forum post."""
    title: str
    content: str
    category: str = "discussion"  # forum category bucket
    tags: List[str] = Field(default_factory=list)  # free-form tags
    parent_post_id: Optional[str] = None  # set when this post is a reply
class HackathonCreateRequest(BaseModel):
    """Request body for creating a hackathon.

    NOTE(review): the four schedule fields are plain strings — presumably
    ISO-8601 timestamps parsed by the service; confirm expected format.
    """
    title: str
    description: str
    theme: str  # hackathon theme shown to participants
    sponsor: str = "AITBC Foundation"  # default sponsoring organisation
    prize_pool: float = 0.0  # total prize amount
    registration_start: str
    registration_end: str
    event_start: str
    event_end: str
# Endpoints - Developer Ecosystem
@router.post("/developers", response_model=DeveloperProfile)
async def create_developer_profile(request: DeveloperProfileCreate, session: SessionDep):
    """Register a new developer in the OpenClaw ecosystem"""
    ecosystem = DeveloperEcosystemService(session)
    try:
        return await ecosystem.create_developer_profile(
            user_id=request.user_id,
            username=request.username,
            bio=request.bio,
            skills=request.skills
        )
    except Exception as e:
        logger.error(f"Error creating developer profile: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/developers/{developer_id}", response_model=DeveloperProfile)
async def get_developer_profile(developer_id: str, session: SessionDep):
    """Get a developer's profile and reputation"""
    found = await DeveloperEcosystemService(session).get_developer_profile(developer_id)
    if not found:
        raise HTTPException(status_code=404, detail="Developer not found")
    return found
@router.get("/sdk/latest")
async def get_latest_sdk(session: SessionDep):
    """Get information about the latest OpenClaw SDK releases"""
    # Thin pass-through to the ecosystem service.
    return await DeveloperEcosystemService(session).get_sdk_release_info()
# Endpoints - Marketplace Solutions
@router.post("/solutions/publish", response_model=AgentSolution)
async def publish_solution(request: SolutionPublishRequest, session: SessionDep):
    """Publish a new third-party agent solution to the marketplace"""
    marketplace = ThirdPartySolutionService(session)
    try:
        # Forward everything except developer_id, which is passed separately.
        payload = request.dict(exclude={'developer_id'})
        return await marketplace.publish_solution(request.developer_id, payload)
    except Exception as e:
        logger.error(f"Error publishing solution: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/solutions", response_model=List[AgentSolution])
async def list_solutions(
    session: SessionDep,
    category: Optional[str] = None,
    limit: int = 50,
):
    """List available third-party agent solutions.

    Fix: the handler referenced `session` without declaring it, so every
    request raised NameError. The `SessionDep` dependency is now a proper
    parameter (placed before the defaulted query params to keep the
    signature valid). HTTP interface is otherwise unchanged.
    """
    service = ThirdPartySolutionService(session)
    return await service.list_published_solutions(category, limit)
@router.post("/solutions/{solution_id}/purchase")
async def purchase_solution(solution_id: str, session: SessionDep, buyer_id: str = Body(embed=True)):
    """Purchase or install a third-party solution"""
    marketplace = ThirdPartySolutionService(session)
    try:
        return await marketplace.purchase_solution(buyer_id, solution_id)
    except ValueError as e:
        # Unknown solution / buyer -> not found.
        raise HTTPException(status_code=404, detail=str(e))
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
# Endpoints - Innovation Labs
@router.post("/labs/propose", response_model=InnovationLab)
async def propose_innovation_lab(
    session: SessionDep,
    researcher_id: str = Query(...),
    request: LabProposalRequest = Body(...),
):
    """Propose a new agent innovation lab or research program.

    Fix: the handler referenced `session` without declaring it, so every
    request raised NameError. The `SessionDep` dependency is now a proper
    parameter, placed ahead of the defaulted Query/Body parameters so the
    signature stays valid. HTTP interface is otherwise unchanged.
    """
    service = InnovationLabService(session)
    try:
        lab = await service.propose_lab(researcher_id, request.dict())
        return lab
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.post("/labs/{lab_id}/join")
async def join_innovation_lab(lab_id: str, session: SessionDep, developer_id: str = Body(embed=True)):
    """Join an active innovation lab"""
    labs = InnovationLabService(session)
    try:
        return await labs.join_lab(lab_id, developer_id)
    except ValueError as e:
        # Unknown lab or developer -> not found.
        raise HTTPException(status_code=404, detail=str(e))
@router.post("/labs/{lab_id}/fund")
async def fund_innovation_lab(lab_id: str, session: SessionDep, amount: float = Body(embed=True)):
    """Provide funding to a proposed innovation lab"""
    labs = InnovationLabService(session)
    try:
        return await labs.fund_lab(lab_id, amount)
    except ValueError as e:
        # Unknown lab -> not found.
        raise HTTPException(status_code=404, detail=str(e))
# Endpoints - Community Platform
@router.post("/platform/posts", response_model=CommunityPost)
async def create_community_post(
    session: SessionDep,
    author_id: str = Query(...),
    request: PostCreateRequest = Body(...),
):
    """Create a new post in the community forum.

    Fix: the handler referenced `session` without declaring it, so every
    request raised NameError. The `SessionDep` dependency is now a proper
    parameter, placed ahead of the defaulted Query/Body parameters so the
    signature stays valid. HTTP interface is otherwise unchanged.
    """
    service = CommunityPlatformService(session)
    try:
        post = await service.create_post(author_id, request.dict())
        return post
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/platform/feed", response_model=List[CommunityPost])
async def get_community_feed(
    session: SessionDep,
    category: Optional[str] = None,
    limit: int = 20,
):
    """Get the latest community posts and discussions.

    Fix: the handler referenced `session` without declaring it, so every
    request raised NameError. The `SessionDep` dependency is now a proper
    parameter (placed before the defaulted query params). HTTP interface
    is otherwise unchanged.
    """
    service = CommunityPlatformService(session)
    return await service.get_feed(category, limit)
@router.post("/platform/posts/{post_id}/upvote")
async def upvote_community_post(post_id: str, session: SessionDep):
    """Upvote a community post (rewards author reputation)"""
    platform = CommunityPlatformService(session)
    try:
        return await platform.upvote_post(post_id)
    except ValueError as e:
        # Unknown post -> not found.
        raise HTTPException(status_code=404, detail=str(e))
# Endpoints - Hackathons
@router.post("/hackathons/create", response_model=Hackathon)
async def create_hackathon(
    session: SessionDep,
    organizer_id: str = Query(...),
    request: HackathonCreateRequest = Body(...),
):
    """Create a new agent innovation hackathon (requires high reputation).

    Fix: the handler referenced `session` without declaring it, so every
    request raised NameError. The `SessionDep` dependency is now a proper
    parameter, placed ahead of the defaulted Query/Body parameters so the
    signature stays valid. HTTP interface is otherwise unchanged.

    Raises 403 when the service rejects the organizer (ValueError),
    500 for any other failure.
    """
    service = CommunityPlatformService(session)
    try:
        hackathon = await service.create_hackathon(organizer_id, request.dict())
        return hackathon
    except ValueError as e:
        raise HTTPException(status_code=403, detail=str(e))
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.post("/hackathons/{hackathon_id}/register")
async def register_for_hackathon(hackathon_id: str, session: SessionDep, developer_id: str = Body(embed=True)):
    """Register for an upcoming or ongoing hackathon"""
    platform = CommunityPlatformService(session)
    try:
        return await platform.register_for_hackathon(hackathon_id, developer_id)
    except ValueError as e:
        # Closed registration / unknown hackathon -> bad request.
        raise HTTPException(status_code=400, detail=str(e))

View File

@@ -1,384 +1,147 @@
"""
Governance Router - Proposal voting and parameter changes
Decentralized Governance API Endpoints
REST API for OpenClaw DAO voting, proposals, and governance analytics
"""
from fastapi import APIRouter, Depends, HTTPException, BackgroundTasks
from datetime import datetime
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query, Body
from pydantic import BaseModel, Field
from typing import Optional, Dict, Any, List
from datetime import datetime, timedelta
import json
import logging
from ..storage import SessionDep
from ..services.governance_service import GovernanceService
from ..domain.governance import (
GovernanceProfile, Proposal, Vote, DaoTreasury, TransparencyReport,
ProposalStatus, VoteType, GovernanceRole
)
logger = logging.getLogger(__name__)
from ..schemas import UserProfile
from ..storage import SessionDep
from ..storage.models_governance import GovernanceProposal, ProposalVote
from sqlmodel import select, func
router = APIRouter(prefix="/governance", tags=["governance"])
router = APIRouter(tags=["governance"])
# Models
class ProfileInitRequest(BaseModel):
user_id: str
initial_voting_power: float = 0.0
class DelegationRequest(BaseModel):
delegatee_id: str
class ProposalCreate(BaseModel):
"""Create a new governance proposal"""
title: str = Field(..., min_length=10, max_length=200)
description: str = Field(..., min_length=50, max_length=5000)
type: str = Field(..., pattern="^(parameter_change|protocol_upgrade|fund_allocation|policy_change)$")
target: Optional[Dict[str, Any]] = Field(default_factory=dict)
voting_period: int = Field(default=7, ge=1, le=30) # days
quorum_threshold: float = Field(default=0.1, ge=0.01, le=1.0) # 10% default
approval_threshold: float = Field(default=0.5, ge=0.01, le=1.0) # 50% default
class ProposalResponse(BaseModel):
"""Governance proposal response"""
id: str
class ProposalCreateRequest(BaseModel):
title: str
description: str
type: str
target: Dict[str, Any]
proposer: str
status: str
created_at: datetime
voting_deadline: datetime
quorum_threshold: float
approval_threshold: float
current_quorum: float
current_approval: float
votes_for: int
votes_against: int
votes_abstain: int
total_voting_power: int
category: str = "general"
execution_payload: Dict[str, Any] = Field(default_factory=dict)
quorum_required: float = 1000.0
voting_starts: Optional[str] = None
voting_ends: Optional[str] = None
class VoteRequest(BaseModel):
vote_type: VoteType
reason: Optional[str] = None
class VoteSubmit(BaseModel):
"""Submit a vote on a proposal"""
proposal_id: str
vote: str = Field(..., pattern="^(for|against|abstain)$")
reason: Optional[str] = Field(max_length=500)
# Endpoints - Profile & Delegation
@router.post("/profiles", response_model=GovernanceProfile)
async def init_governance_profile(request: ProfileInitRequest, session: SessionDep):
"""Initialize a governance profile for a user"""
service = GovernanceService(session)
try:
profile = await service.get_or_create_profile(request.user_id, request.initial_voting_power)
return profile
except Exception as e:
logger.error(f"Error creating governance profile: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/profiles/{profile_id}/delegate", response_model=GovernanceProfile)
async def delegate_voting_power(profile_id: str, request: DelegationRequest, session: SessionDep):
"""Delegate your voting power to another DAO member"""
service = GovernanceService(session)
try:
profile = await service.delegate_votes(profile_id, request.delegatee_id)
return profile
except ValueError as e:
raise HTTPException(status_code=404, detail=str(e))
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
@router.post("/governance/proposals", response_model=ProposalResponse)
# Endpoints - Proposals
@router.post("/proposals", response_model=Proposal)
async def create_proposal(
proposal: ProposalCreate,
user: UserProfile,
session: SessionDep
) -> ProposalResponse:
"""Create a new governance proposal"""
# Check if user has voting power
voting_power = await get_user_voting_power(user.user_id, session)
if voting_power == 0:
raise HTTPException(403, "You must have voting power to create proposals")
# Create proposal
db_proposal = GovernanceProposal(
title=proposal.title,
description=proposal.description,
type=proposal.type,
target=proposal.target,
proposer=user.user_id,
status="active",
created_at=datetime.utcnow(),
voting_deadline=datetime.utcnow() + timedelta(days=proposal.voting_period),
quorum_threshold=proposal.quorum_threshold,
approval_threshold=proposal.approval_threshold
)
session.add(db_proposal)
session.commit()
session.refresh(db_proposal)
# Return response
return await format_proposal_response(db_proposal, session)
session: SessionDep,
proposer_id: str = Query(...),
request: ProposalCreateRequest = Body(...)
):
"""Submit a new governance proposal to the DAO"""
service = GovernanceService(session)
try:
proposal = await service.create_proposal(proposer_id, request.dict())
return proposal
except ValueError as e:
raise HTTPException(status_code=400, detail=str(e))
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
@router.get("/governance/proposals", response_model=List[ProposalResponse])
async def list_proposals(
status: Optional[str] = None,
limit: int = 20,
offset: int = 0,
session: SessionDep = None
) -> List[ProposalResponse]:
"""List governance proposals"""
query = select(GovernanceProposal)
if status:
query = query.where(GovernanceProposal.status == status)
query = query.order_by(GovernanceProposal.created_at.desc())
query = query.offset(offset).limit(limit)
proposals = session.exec(query).all()
responses = []
for proposal in proposals:
formatted = await format_proposal_response(proposal, session)
responses.append(formatted)
return responses
@router.get("/governance/proposals/{proposal_id}", response_model=ProposalResponse)
async def get_proposal(
@router.post("/proposals/{proposal_id}/vote", response_model=Vote)
async def cast_vote(
proposal_id: str,
session: SessionDep
) -> ProposalResponse:
"""Get a specific proposal"""
proposal = session.get(GovernanceProposal, proposal_id)
if not proposal:
raise HTTPException(404, "Proposal not found")
return await format_proposal_response(proposal, session)
@router.post("/governance/vote")
async def submit_vote(
vote: VoteSubmit,
user: UserProfile,
session: SessionDep
) -> Dict[str, str]:
"""Submit a vote on a proposal"""
# Check proposal exists and is active
proposal = session.get(GovernanceProposal, vote.proposal_id)
if not proposal:
raise HTTPException(404, "Proposal not found")
if proposal.status != "active":
raise HTTPException(400, "Proposal is not active for voting")
if datetime.utcnow() > proposal.voting_deadline:
raise HTTPException(400, "Voting period has ended")
# Check user voting power
voting_power = await get_user_voting_power(user.user_id, session)
if voting_power == 0:
raise HTTPException(403, "You have no voting power")
# Check if already voted
existing = session.exec(
select(ProposalVote).where(
ProposalVote.proposal_id == vote.proposal_id,
ProposalVote.voter_id == user.user_id
session: SessionDep,
voter_id: str = Query(...),
request: VoteRequest = Body(...)
):
"""Cast a vote on an active proposal"""
service = GovernanceService(session)
try:
vote = await service.cast_vote(
proposal_id=proposal_id,
voter_id=voter_id,
vote_type=request.vote_type,
reason=request.reason
)
).first()
if existing:
# Update existing vote
existing.vote = vote.vote
existing.reason = vote.reason
existing.voted_at = datetime.utcnow()
else:
# Create new vote
db_vote = ProposalVote(
proposal_id=vote.proposal_id,
voter_id=user.user_id,
vote=vote.vote,
voting_power=voting_power,
reason=vote.reason,
voted_at=datetime.utcnow()
)
session.add(db_vote)
session.commit()
# Check if proposal should be finalized
if datetime.utcnow() >= proposal.voting_deadline:
await finalize_proposal(proposal, session)
return {"message": "Vote submitted successfully"}
return vote
except ValueError as e:
raise HTTPException(status_code=400, detail=str(e))
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
@router.post("/proposals/{proposal_id}/process", response_model=Proposal)
async def process_proposal(proposal_id: str, session: SessionDep):
"""Manually trigger the lifecycle check of a proposal (e.g., tally votes when time ends)"""
service = GovernanceService(session)
try:
proposal = await service.process_proposal_lifecycle(proposal_id)
return proposal
except ValueError as e:
raise HTTPException(status_code=404, detail=str(e))
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
@router.get("/governance/voting-power/{user_id}")
async def get_voting_power(
user_id: str,
session: SessionDep
) -> Dict[str, int]:
"""Get a user's voting power"""
power = await get_user_voting_power(user_id, session)
return {"user_id": user_id, "voting_power": power}
@router.get("/governance/parameters")
async def get_governance_parameters(
session: SessionDep
) -> Dict[str, Any]:
"""Get current governance parameters"""
# These would typically be stored in a config table
return {
"min_proposal_voting_power": 1000,
"max_proposal_title_length": 200,
"max_proposal_description_length": 5000,
"default_voting_period_days": 7,
"max_voting_period_days": 30,
"min_quorum_threshold": 0.01,
"max_quorum_threshold": 1.0,
"min_approval_threshold": 0.01,
"max_approval_threshold": 1.0,
"execution_delay_hours": 24
}
@router.post("/governance/execute/{proposal_id}")
@router.post("/proposals/{proposal_id}/execute", response_model=Proposal)
async def execute_proposal(
proposal_id: str,
background_tasks: BackgroundTasks,
session: SessionDep
) -> Dict[str, str]:
"""Execute an approved proposal"""
proposal = session.get(GovernanceProposal, proposal_id)
if not proposal:
raise HTTPException(404, "Proposal not found")
if proposal.status != "passed":
raise HTTPException(400, "Proposal must be passed to execute")
if datetime.utcnow() < proposal.voting_deadline + timedelta(hours=24):
raise HTTPException(400, "Must wait 24 hours after voting ends to execute")
# Execute proposal based on type
if proposal.type == "parameter_change":
await execute_parameter_change(proposal.target, background_tasks)
elif proposal.type == "protocol_upgrade":
await execute_protocol_upgrade(proposal.target, background_tasks)
elif proposal.type == "fund_allocation":
await execute_fund_allocation(proposal.target, background_tasks)
elif proposal.type == "policy_change":
await execute_policy_change(proposal.target, background_tasks)
# Update proposal status
proposal.status = "executed"
proposal.executed_at = datetime.utcnow()
session.commit()
return {"message": "Proposal executed successfully"}
session: SessionDep,
executor_id: str = Query(...)
):
"""Execute the payload of a succeeded proposal"""
service = GovernanceService(session)
try:
proposal = await service.execute_proposal(proposal_id, executor_id)
return proposal
except ValueError as e:
raise HTTPException(status_code=400, detail=str(e))
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
# Helper functions
async def get_user_voting_power(user_id: str, session) -> int:
    """Return the voting power held by *user_id*, denominated in AITBC.

    Placeholder: a production implementation would read token balances
    from the blockchain; this stub grants every user a fixed stake.
    """
    mock_stake = 10000
    return mock_stake
async def format_proposal_response(proposal: GovernanceProposal, session) -> ProposalResponse:
    """Build the API-facing view of *proposal*, including live vote tallies,
    quorum progress, and approval ratio."""
    ballots = session.exec(
        select(ProposalVote).where(ProposalVote.proposal_id == proposal.id)
    ).all()
    # Tally ballot counts and power in a single pass.
    tally = {"for": 0, "against": 0, "abstain": 0}
    cast_power = 0
    supporting_power = 0
    for ballot in ballots:
        if ballot.vote in tally:
            tally[ballot.vote] += 1
        cast_power += ballot.voting_power
        if ballot.vote == "for":
            supporting_power += ballot.voting_power
    # Quorum = share of total system power that voted;
    # approval = share of cast power that voted "for".
    system_power = await get_total_voting_power(session)
    quorum_now = cast_power / system_power if system_power > 0 else 0
    approval_now = supporting_power / cast_power if cast_power > 0 else 0
    return ProposalResponse(
        id=proposal.id,
        title=proposal.title,
        description=proposal.description,
        type=proposal.type,
        target=proposal.target,
        proposer=proposal.proposer,
        status=proposal.status,
        created_at=proposal.created_at,
        voting_deadline=proposal.voting_deadline,
        quorum_threshold=proposal.quorum_threshold,
        approval_threshold=proposal.approval_threshold,
        current_quorum=quorum_now,
        current_approval=approval_now,
        votes_for=tally["for"],
        votes_against=tally["against"],
        votes_abstain=tally["abstain"],
        total_voting_power=system_power
    )
async def get_total_voting_power(session) -> int:
    """Return total voting power across all holders.

    Mocked pending chain integration; a real implementation would sum
    every AITBC token balance.
    """
    MOCK_TOTAL_SUPPLY = 1000000
    return MOCK_TOTAL_SUPPLY
async def finalize_proposal(proposal: GovernanceProposal, session):
    """Settle a proposal whose voting window has closed.

    Rejects when quorum or approval thresholds are missed (recording the
    reason), otherwise marks it passed; persists the outcome.
    """
    ballots = session.exec(
        select(ProposalVote).where(ProposalVote.proposal_id == proposal.id)
    ).all()
    cast_power = sum(b.voting_power for b in ballots)
    yes_power = sum(b.voting_power for b in ballots if b.vote == "for")
    system_power = await get_total_voting_power(session)
    quorum_ratio = cast_power / system_power if system_power > 0 else 0
    approval_ratio = yes_power / cast_power if cast_power > 0 else 0
    if quorum_ratio < proposal.quorum_threshold:
        outcome, reason = "rejected", "Quorum not met"
    elif approval_ratio < proposal.approval_threshold:
        outcome, reason = "rejected", "Approval threshold not met"
    else:
        outcome, reason = "passed", None
    proposal.status = outcome
    if reason is not None:
        proposal.rejection_reason = reason
    session.commit()
async def execute_parameter_change(target: Dict[str, Any], background_tasks):
    """Apply a parameter-change proposal payload.

    Stub: records the request only; the actual parameter update is not
    yet wired in and depends on the specific parameters.
    """
    logger.info("Executing parameter change: %s", target)
async def execute_protocol_upgrade(target: Dict[str, Any], background_tasks):
    """Trigger a protocol-upgrade proposal payload.

    Stub: records the request only; a real implementation would
    coordinate the upgrade across nodes.
    """
    logger.info("Executing protocol upgrade: %s", target)
async def execute_fund_allocation(target: Dict[str, Any], background_tasks):
    """Carry out a fund-allocation proposal payload.

    Stub: records the request only; a real implementation would move
    funds out of the treasury.
    """
    logger.info("Executing fund allocation: %s", target)
async def execute_policy_change(target: Dict[str, Any], background_tasks):
    """Apply a policy-change proposal payload.

    Stub: records the request only; the concrete policy update depends
    on the specific policy being changed.
    """
    logger.info("Executing policy change: %s", target)
# Export the router
# Only the FastAPI router is part of this module's public surface;
# helper functions above are internal.
__all__ = ["router"]
# Endpoints - Analytics
@router.post("/analytics/reports", response_model=TransparencyReport)
async def generate_transparency_report(
    session: SessionDep,
    period: str = Query(..., description="e.g., 2026-Q1")
):
    """Generate a governance analytics and transparency report for *period*."""
    service = GovernanceService(session)
    try:
        return await service.generate_transparency_report(period)
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))

View File

@@ -0,0 +1,196 @@
"""
Marketplace Performance Optimization API Endpoints
REST API for managing distributed processing, GPU optimization, caching, and scaling
"""
import asyncio
from datetime import datetime
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query, BackgroundTasks
from pydantic import BaseModel, Field
import logging
from ..storage import SessionDep
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), "../../../../../gpu_acceleration"))
from marketplace_gpu_optimizer import MarketplaceGPUOptimizer
from aitbc.gpu_acceleration.parallel_processing.distributed_framework import DistributedProcessingCoordinator, DistributedTask, WorkerStatus
from aitbc.gpu_acceleration.parallel_processing.marketplace_cache_optimizer import MarketplaceDataOptimizer
from aitbc.gpu_acceleration.parallel_processing.marketplace_monitor import monitor as marketplace_monitor
from aitbc.gpu_acceleration.parallel_processing.marketplace_scaler import ResourceScaler, ScalingPolicy
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/v1/marketplace/performance", tags=["marketplace-performance"])
# Global instances (in a real app these might be injected or application state)
gpu_optimizer = MarketplaceGPUOptimizer()
distributed_coordinator = DistributedProcessingCoordinator()
cache_optimizer = MarketplaceDataOptimizer()
resource_scaler = ResourceScaler()
# Startup event handler for background tasks
@router.on_event("startup")
async def startup_event():
    """Bring up the shared performance subsystems when the app starts.

    Order: monitor first (so later components can report into it), then
    the distributed coordinator, the auto-scaler, and the cache backend.
    NOTE(review): ``@router.on_event`` is deprecated in recent FastAPI
    releases in favor of lifespan handlers — confirm the pinned version.
    """
    await marketplace_monitor.start()
    await distributed_coordinator.start()
    await resource_scaler.start()
    await cache_optimizer.connect()
@router.on_event("shutdown")
async def shutdown_event():
await marketplace_monitor.stop()
await distributed_coordinator.stop()
await resource_scaler.stop()
await cache_optimizer.disconnect()
# Models
class GPUAllocationRequest(BaseModel):
    """Resource requirements for a single marketplace GPU job."""
    job_id: Optional[str] = None  # omit to let the optimizer assign one
    memory_bytes: int = Field(1024 * 1024 * 1024, description="Memory needed in bytes")
    compute_units: float = Field(1.0, description="Relative compute requirement")
    max_latency_ms: int = Field(1000, description="Max acceptable latency")
    priority: int = Field(1, ge=1, le=10, description="Job priority 1-10")
class GPUReleaseRequest(BaseModel):
    """Identifies a job whose GPU allocation should be released."""
    job_id: str
class DistributedTaskRequest(BaseModel):
    """Payload for submitting a task to the distributed framework."""
    agent_id: str
    payload: Dict[str, Any]  # opaque task payload forwarded to the worker
    priority: int = Field(1, ge=1, le=100)
    requires_gpu: bool = Field(False)
    timeout_ms: int = Field(30000)
class WorkerRegistrationRequest(BaseModel):
    """Registration details for a new worker node joining the cluster."""
    worker_id: str
    capabilities: List[str]  # capability tags used for task routing
    has_gpu: bool = Field(False)
    max_concurrent_tasks: int = Field(4)
class ScalingPolicyUpdate(BaseModel):
    """Partial update of the auto-scaling policy; None fields are left unchanged."""
    min_nodes: Optional[int] = None
    max_nodes: Optional[int] = None
    target_utilization: Optional[float] = None
    scale_up_threshold: Optional[float] = None
    predictive_scaling: Optional[bool] = None
# Endpoints: GPU Optimization
@router.post("/gpu/allocate")
async def allocate_gpu_resources(request: GPUAllocationRequest):
    """Request optimal GPU resource allocation for a marketplace task.

    Returns the optimizer's allocation result.

    Raises:
        HTTPException 503: no resources satisfy the request.
        HTTPException 500: unexpected optimizer failure.
    """
    # FIX: ``time`` was used here without ever being imported in this
    # module, so every call raised NameError. A function-scope import
    # keeps the fix self-contained.
    import time
    try:
        start_time = time.time()
        result = await gpu_optimizer.optimize_resource_allocation(request.dict())
        # Record API latency in milliseconds for the live dashboard.
        marketplace_monitor.record_api_call((time.time() - start_time) * 1000)
        if not result.get("success"):
            raise HTTPException(status_code=503, detail=result.get("reason", "Resources unavailable"))
        return result
    except HTTPException:
        raise
    except Exception as e:
        marketplace_monitor.record_api_call(0, is_error=True)
        logger.error(f"Error in GPU allocation: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@router.post("/gpu/release")
async def release_gpu_resources(request: GPUReleaseRequest):
"""Release previously allocated GPU resources"""
success = gpu_optimizer.release_resources(request.job_id)
if not success:
raise HTTPException(status_code=404, detail="Job ID not found")
return {"success": True, "message": f"Resources for {request.job_id} released"}
@router.get("/gpu/status")
async def get_gpu_status():
"""Get overall GPU fleet status and optimization metrics"""
return gpu_optimizer.get_system_status()
# Endpoints: Distributed Processing
@router.post("/distributed/task")
async def submit_distributed_task(request: DistributedTaskRequest):
    """Submit a task to the distributed processing framework.

    ``task_id`` is passed as None — presumably the coordinator assigns an
    ID during submission and returns it (TODO confirm against
    DistributedProcessingCoordinator.submit_task).
    """
    task = DistributedTask(
        task_id=None,
        agent_id=request.agent_id,
        payload=request.payload,
        priority=request.priority,
        requires_gpu=request.requires_gpu,
        timeout_ms=request.timeout_ms
    )
    task_id = await distributed_coordinator.submit_task(task)
    return {"task_id": task_id, "status": "submitted"}
@router.get("/distributed/task/{task_id}")
async def get_distributed_task_status(task_id: str):
"""Check the status and get results of a distributed task"""
status = await distributed_coordinator.get_task_status(task_id)
if not status:
raise HTTPException(status_code=404, detail="Task not found")
return status
@router.post("/distributed/worker/register")
async def register_worker(request: WorkerRegistrationRequest):
"""Register a new worker node in the cluster"""
distributed_coordinator.register_worker(
worker_id=request.worker_id,
capabilities=request.capabilities,
has_gpu=request.has_gpu,
max_tasks=request.max_concurrent_tasks
)
return {"success": True, "message": f"Worker {request.worker_id} registered"}
@router.get("/distributed/status")
async def get_cluster_status():
"""Get overall distributed cluster health and load"""
return distributed_coordinator.get_cluster_status()
# Endpoints: Caching
@router.get("/cache/stats")
async def get_cache_stats():
    """Get current caching performance statistics.

    NOTE(review): reaches into ``cache_optimizer.is_connected``,
    ``.l1_cache.cache`` and ``.ttls`` directly — confirm these remain part
    of MarketplaceDataOptimizer's stable surface.
    """
    return {
        "status": "connected" if cache_optimizer.is_connected else "local_only",
        "l1_cache_size": len(cache_optimizer.l1_cache.cache),
        "namespaces_tracked": list(cache_optimizer.ttls.keys())
    }
@router.post("/cache/invalidate/{namespace}")
async def invalidate_cache_namespace(namespace: str, background_tasks: BackgroundTasks):
"""Invalidate a specific cache namespace (e.g., 'order_book')"""
background_tasks.add_task(cache_optimizer.invalidate_namespace, namespace)
return {"success": True, "message": f"Invalidation for {namespace} queued"}
# Endpoints: Monitoring
@router.get("/monitor/dashboard")
async def get_monitoring_dashboard():
    """Get real-time performance dashboard data from the shared monitor."""
    return marketplace_monitor.get_realtime_dashboard_data()
# Endpoints: Auto-scaling
@router.get("/scaler/status")
async def get_scaler_status():
    """Get current auto-scaler status and active rules."""
    return resource_scaler.get_status()
@router.post("/scaler/policy")
async def update_scaling_policy(policy_update: ScalingPolicyUpdate):
"""Update auto-scaling thresholds and parameters dynamically"""
current_policy = resource_scaler.policy
if policy_update.min_nodes is not None:
current_policy.min_nodes = policy_update.min_nodes
if policy_update.max_nodes is not None:
current_policy.max_nodes = policy_update.max_nodes
if policy_update.target_utilization is not None:
current_policy.target_utilization = policy_update.target_utilization
if policy_update.scale_up_threshold is not None:
current_policy.scale_up_threshold = policy_update.scale_up_threshold
if policy_update.predictive_scaling is not None:
current_policy.predictive_scaling = policy_update.predictive_scaling
return {"success": True, "message": "Scaling policy updated successfully"}

View File

@@ -0,0 +1,822 @@
"""
Multi-Modal Fusion and Advanced RL API Endpoints
REST API for multi-modal agent fusion and advanced reinforcement learning
"""
import logging
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional

from fastapi import APIRouter, Depends, HTTPException, Query, BackgroundTasks, WebSocket, WebSocketDisconnect
from pydantic import BaseModel, Field
from sqlmodel import select

from ..storage import SessionDep
from ..services.multi_modal_fusion import MultiModalFusionEngine
from ..services.advanced_reinforcement_learning import AdvancedReinforcementLearningEngine, MarketplaceStrategyOptimizer, CrossDomainCapabilityIntegrator
from ..domain.agent_performance import (
    FusionModel, ReinforcementLearningConfig, AgentCapability,
    CreativeCapability
)
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/multi-modal-rl", tags=["multi-modal-rl"])
# Pydantic models for API requests/responses
class FusionModelRequest(BaseModel):
    """Request model for creating a multi-modal fusion model."""
    model_name: str
    fusion_type: str = Field(default="cross_domain")
    base_models: List[str]  # identifiers of the models being fused
    input_modalities: List[str]  # e.g. text, image, audio
    fusion_strategy: str = Field(default="ensemble_fusion")
class FusionModelResponse(BaseModel):
    """Response model describing a persisted fusion model."""
    fusion_id: str
    model_name: str
    fusion_type: str
    base_models: List[str]
    input_modalities: List[str]
    fusion_strategy: str
    status: str
    fusion_performance: Dict[str, float]
    synergy_score: float
    robustness_score: float
    created_at: str  # ISO-8601 timestamp string
    trained_at: Optional[str]  # None until training has completed
class FusionRequest(BaseModel):
    """Request model for running inference on a fusion model."""
    fusion_id: str
    input_data: Dict[str, Any]  # per-modality inputs keyed by modality name
class FusionResponse(BaseModel):
    """Response model for a single fusion inference result."""
    fusion_type: str
    combined_result: Dict[str, Any]
    confidence: float
    metadata: Dict[str, Any]  # contributions, attention weights, gains
class RLAgentRequest(BaseModel):
    """Request model for creating an RL agent configuration."""
    agent_id: str
    environment_type: str
    algorithm: str = Field(default="ppo")
    training_config: Dict[str, Any] = Field(default_factory=dict)
class RLAgentResponse(BaseModel):
    """Response model describing a persisted RL agent configuration."""
    config_id: str
    agent_id: str
    environment_type: str
    algorithm: str
    status: str
    learning_rate: float
    discount_factor: float
    exploration_rate: float
    max_episodes: int
    created_at: str  # ISO-8601 timestamp string
    trained_at: Optional[str]  # None until training has completed
class RLTrainingResponse(BaseModel):
    """Response model summarizing an RL training run."""
    config_id: str
    final_performance: float
    convergence_episode: int
    training_episodes: int
    success_rate: float
    training_time: float  # presumably seconds — TODO confirm producer
class StrategyOptimizationRequest(BaseModel):
    """Request model for RL-based strategy optimization."""
    agent_id: str
    strategy_type: str
    algorithm: str = Field(default="ppo")
    training_episodes: int = Field(default=500)
class StrategyOptimizationResponse(BaseModel):
    """Response model for a strategy optimization run.

    NOTE(review): ``config_id``/``strategy_type``/``algorithm`` are
    non-Optional, yet the endpoint populates them via ``result.get(...)``
    which may yield None on failure — confirm the optimizer always
    returns these keys, or make these fields Optional.
    """
    success: bool
    config_id: str
    strategy_type: str
    algorithm: str
    final_performance: float
    convergence_episode: int
    training_episodes: int
    success_rate: float
class CapabilityIntegrationRequest(BaseModel):
    """Request model for cross-domain capability integration."""
    agent_id: str
    capabilities: List[str]  # capability identifiers to integrate
    integration_strategy: str = Field(default="adaptive")
class CapabilityIntegrationResponse(BaseModel):
    """Response model for a cross-domain capability integration run."""
    agent_id: str
    integration_strategy: str
    domain_capabilities: Dict[str, List[Dict[str, Any]]]  # grouped by domain
    synergy_score: float
    enhanced_capabilities: List[str]
    fusion_model_id: str
    integration_result: Dict[str, Any]
# API Endpoints
@router.post("/fusion/models", response_model=FusionModelResponse)
async def create_fusion_model(
    fusion_request: FusionModelRequest,
    session: SessionDep
) -> FusionModelResponse:
    """Create multi-modal fusion model.

    Delegates to ``MultiModalFusionEngine.create_fusion_model`` and maps
    the persisted record onto the response schema. Unexpected failures
    are logged and surfaced as a generic 500 to avoid leaking internals.
    """
    fusion_engine = MultiModalFusionEngine()
    try:
        fusion_model = await fusion_engine.create_fusion_model(
            session=session,
            model_name=fusion_request.model_name,
            fusion_type=fusion_request.fusion_type,
            base_models=fusion_request.base_models,
            input_modalities=fusion_request.input_modalities,
            fusion_strategy=fusion_request.fusion_strategy
        )
        return FusionModelResponse(
            fusion_id=fusion_model.fusion_id,
            model_name=fusion_model.model_name,
            fusion_type=fusion_model.fusion_type,
            base_models=fusion_model.base_models,
            input_modalities=fusion_model.input_modalities,
            fusion_strategy=fusion_model.fusion_strategy,
            status=fusion_model.status,
            fusion_performance=fusion_model.fusion_performance,
            synergy_score=fusion_model.synergy_score,
            robustness_score=fusion_model.robustness_score,
            created_at=fusion_model.created_at.isoformat(),
            trained_at=fusion_model.trained_at.isoformat() if fusion_model.trained_at else None
        )
    except Exception as e:
        logger.error(f"Error creating fusion model: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/fusion/{fusion_id}/infer", response_model=FusionResponse)
async def fuse_modalities(
fusion_id: str,
fusion_request: FusionRequest,
session: SessionDep
) -> FusionResponse:
"""Fuse modalities using trained model"""
fusion_engine = MultiModalFusionEngine()
try:
fusion_result = await fusion_engine.fuse_modalities(
session=session,
fusion_id=fusion_id,
input_data=fusion_request.input_data
)
return FusionResponse(
fusion_type=fusion_result['fusion_type'],
combined_result=fusion_result['combined_result'],
confidence=fusion_result.get('confidence', 0.0),
metadata={
'modality_contributions': fusion_result.get('modality_contributions', {}),
'attention_weights': fusion_result.get('attention_weights', {}),
'optimization_gain': fusion_result.get('optimization_gain', 0.0)
}
)
except ValueError as e:
raise HTTPException(status_code=404, detail=str(e))
except Exception as e:
logger.error(f"Error during fusion: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/fusion/models")
async def list_fusion_models(
session: SessionDep,
status: Optional[str] = Query(default=None, description="Filter by status"),
fusion_type: Optional[str] = Query(default=None, description="Filter by fusion type"),
limit: int = Query(default=50, ge=1, le=100, description="Number of results")
) -> List[Dict[str, Any]]:
"""List fusion models"""
try:
query = select(FusionModel)
if status:
query = query.where(FusionModel.status == status)
if fusion_type:
query = query.where(FusionModel.fusion_type == fusion_type)
models = session.exec(
query.order_by(FusionModel.created_at.desc()).limit(limit)
).all()
return [
{
"fusion_id": model.fusion_id,
"model_name": model.model_name,
"fusion_type": model.fusion_type,
"base_models": model.base_models,
"input_modalities": model.input_modalities,
"fusion_strategy": model.fusion_strategy,
"status": model.status,
"fusion_performance": model.fusion_performance,
"synergy_score": model.synergy_score,
"robustness_score": model.robustness_score,
"computational_complexity": model.computational_complexity,
"memory_requirement": model.memory_requirement,
"inference_time": model.inference_time,
"deployment_count": model.deployment_count,
"performance_stability": model.performance_stability,
"created_at": model.created_at.isoformat(),
"trained_at": model.trained_at.isoformat() if model.trained_at else None
}
for model in models
]
except Exception as e:
logger.error(f"Error listing fusion models: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/rl/agents", response_model=RLAgentResponse)
async def create_rl_agent(
agent_request: RLAgentRequest,
session: SessionDep
) -> RLAgentResponse:
"""Create RL agent for marketplace strategies"""
rl_engine = AdvancedReinforcementLearningEngine()
try:
rl_config = await rl_engine.create_rl_agent(
session=session,
agent_id=agent_request.agent_id,
environment_type=agent_request.environment_type,
algorithm=agent_request.algorithm,
training_config=agent_request.training_config
)
return RLAgentResponse(
config_id=rl_config.config_id,
agent_id=rl_config.agent_id,
environment_type=rl_config.environment_type,
algorithm=rl_config.algorithm,
status=rl_config.status,
learning_rate=rl_config.learning_rate,
discount_factor=rl_config.discount_factor,
exploration_rate=rl_config.exploration_rate,
max_episodes=rl_config.max_episodes,
created_at=rl_config.created_at.isoformat(),
trained_at=rl_config.trained_at.isoformat() if rl_config.trained_at else None
)
except Exception as e:
logger.error(f"Error creating RL agent: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.websocket("/fusion/{fusion_id}/stream")
async def fuse_modalities_stream(
websocket: WebSocket,
fusion_id: str,
session: SessionDep
):
"""Stream modalities and receive fusion results via WebSocket for high performance"""
await websocket.accept()
fusion_engine = MultiModalFusionEngine()
try:
while True:
# Receive text data (JSON) containing input modalities
data = await websocket.receive_json()
# Start timing
start_time = datetime.utcnow()
# Process fusion
fusion_result = await fusion_engine.fuse_modalities(
session=session,
fusion_id=fusion_id,
input_data=data
)
# End timing
processing_time = (datetime.utcnow() - start_time).total_seconds()
# Send result back
await websocket.send_json({
"fusion_type": fusion_result['fusion_type'],
"combined_result": fusion_result['combined_result'],
"confidence": fusion_result.get('confidence', 0.0),
"metadata": {
"processing_time": processing_time,
"fusion_strategy": fusion_result.get('strategy', 'unknown'),
"protocol": "websocket"
}
})
except WebSocketDisconnect:
logger.info(f"WebSocket client disconnected from fusion stream {fusion_id}")
except Exception as e:
logger.error(f"Error in fusion stream: {str(e)}")
try:
await websocket.send_json({"error": str(e)})
await websocket.close(code=1011, reason=str(e))
except:
pass
@router.get("/rl/agents/{agent_id}")
async def get_rl_agents(
agent_id: str,
session: SessionDep,
status: Optional[str] = Query(default=None, description="Filter by status"),
algorithm: Optional[str] = Query(default=None, description="Filter by algorithm"),
limit: int = Query(default=20, ge=1, le=100, description="Number of results")
) -> List[Dict[str, Any]]:
"""Get RL agents for agent"""
try:
query = select(ReinforcementLearningConfig).where(ReinforcementLearningConfig.agent_id == agent_id)
if status:
query = query.where(ReinforcementLearningConfig.status == status)
if algorithm:
query = query.where(ReinforcementLearningConfig.algorithm == algorithm)
configs = session.exec(
query.order_by(ReinforcementLearningConfig.created_at.desc()).limit(limit)
).all()
return [
{
"config_id": config.config_id,
"agent_id": config.agent_id,
"environment_type": config.environment_type,
"algorithm": config.algorithm,
"status": config.status,
"learning_rate": config.learning_rate,
"discount_factor": config.discount_factor,
"exploration_rate": config.exploration_rate,
"batch_size": config.batch_size,
"network_layers": config.network_layers,
"activation_functions": config.activation_functions,
"max_episodes": config.max_episodes,
"max_steps_per_episode": config.max_steps_per_episode,
"action_space": config.action_space,
"state_space": config.state_space,
"reward_history": config.reward_history,
"success_rate_history": config.success_rate_history,
"convergence_episode": config.convergence_episode,
"training_progress": config.training_progress,
"deployment_performance": config.deployment_performance,
"deployment_count": config.deployment_count,
"created_at": config.created_at.isoformat(),
"trained_at": config.trained_at.isoformat() if config.trained_at else None,
"deployed_at": config.deployed_at.isoformat() if config.deployed_at else None
}
for config in configs
]
except Exception as e:
logger.error(f"Error getting RL agents for agent {agent_id}: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/rl/optimize-strategy", response_model=StrategyOptimizationResponse)
async def optimize_strategy(
optimization_request: StrategyOptimizationRequest,
session: SessionDep
) -> StrategyOptimizationResponse:
"""Optimize agent strategy using RL"""
strategy_optimizer = MarketplaceStrategyOptimizer()
try:
result = await strategy_optimizer.optimize_agent_strategy(
session=session,
agent_id=optimization_request.agent_id,
strategy_type=optimization_request.strategy_type,
algorithm=optimization_request.algorithm,
training_episodes=optimization_request.training_episodes
)
return StrategyOptimizationResponse(
success=result['success'],
config_id=result.get('config_id'),
strategy_type=result.get('strategy_type'),
algorithm=result.get('algorithm'),
final_performance=result.get('final_performance', 0.0),
convergence_episode=result.get('convergence_episode', 0),
training_episodes=result.get('training_episodes', 0),
success_rate=result.get('success_rate', 0.0)
)
except Exception as e:
logger.error(f"Error optimizing strategy: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/rl/deploy-strategy")
async def deploy_strategy(
config_id: str,
deployment_context: Dict[str, Any],
session: SessionDep
) -> Dict[str, Any]:
"""Deploy trained strategy"""
strategy_optimizer = MarketplaceStrategyOptimizer()
try:
result = await strategy_optimizer.deploy_strategy(
session=session,
config_id=config_id,
deployment_context=deployment_context
)
return result
except ValueError as e:
raise HTTPException(status_code=404, detail=str(e))
except Exception as e:
logger.error(f"Error deploying strategy: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/capabilities/integrate", response_model=CapabilityIntegrationResponse)
async def integrate_capabilities(
integration_request: CapabilityIntegrationRequest,
session: SessionDep
) -> CapabilityIntegrationResponse:
"""Integrate capabilities across domains"""
capability_integrator = CrossDomainCapabilityIntegrator()
try:
result = await capability_integrator.integrate_cross_domain_capabilities(
session=session,
agent_id=integration_request.agent_id,
capabilities=integration_request.capabilities,
integration_strategy=integration_request.integration_strategy
)
# Format domain capabilities for response
formatted_domain_caps = {}
for domain, caps in result['domain_capabilities'].items():
formatted_domain_caps[domain] = [
{
"capability_id": cap.capability_id,
"capability_name": cap.capability_name,
"capability_type": cap.capability_type,
"skill_level": cap.skill_level,
"proficiency_score": cap.proficiency_score
}
for cap in caps
]
return CapabilityIntegrationResponse(
agent_id=result['agent_id'],
integration_strategy=result['integration_strategy'],
domain_capabilities=formatted_domain_caps,
synergy_score=result['synergy_score'],
enhanced_capabilities=result['enhanced_capabilities'],
fusion_model_id=result['fusion_model_id'],
integration_result=result['integration_result']
)
except ValueError as e:
raise HTTPException(status_code=404, detail=str(e))
except Exception as e:
logger.error(f"Error integrating capabilities: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/capabilities/{agent_id}/domains")
async def get_agent_domain_capabilities(
agent_id: str,
session: SessionDep,
domain: Optional[str] = Query(default=None, description="Filter by domain"),
limit: int = Query(default=50, ge=1, le=100, description="Number of results")
) -> List[Dict[str, Any]]:
"""Get agent capabilities grouped by domain"""
try:
query = select(AgentCapability).where(AgentCapability.agent_id == agent_id)
if domain:
query = query.where(AgentCapability.domain_area == domain)
capabilities = session.exec(
query.order_by(AgentCapability.skill_level.desc()).limit(limit)
).all()
# Group by domain
domain_capabilities = {}
for cap in capabilities:
if cap.domain_area not in domain_capabilities:
domain_capabilities[cap.domain_area] = []
domain_capabilities[cap.domain_area].append({
"capability_id": cap.capability_id,
"capability_name": cap.capability_name,
"capability_type": cap.capability_type,
"skill_level": cap.skill_level,
"proficiency_score": cap.proficiency_score,
"specialization_areas": cap.specialization_areas,
"learning_rate": cap.learning_rate,
"adaptation_speed": cap.adaptation_speed,
"certified": cap.certified,
"certification_level": cap.certification_level,
"status": cap.status,
"acquired_at": cap.acquired_at.isoformat(),
"last_improved": cap.last_improved.isoformat() if cap.last_improved else None
})
return [
{
"domain": domain,
"capabilities": caps,
"total_capabilities": len(caps),
"average_skill_level": sum(cap["skill_level"] for cap in caps) / len(caps) if caps else 0.0,
"highest_skill_level": max(cap["skill_level"] for cap in caps) if caps else 0.0
}
for domain, caps in domain_capabilities.items()
]
except Exception as e:
logger.error(f"Error getting domain capabilities for agent {agent_id}: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/creative-capabilities/{agent_id}")
async def get_creative_capabilities(
agent_id: str,
session: SessionDep,
creative_domain: Optional[str] = Query(default=None, description="Filter by creative domain"),
limit: int = Query(default=50, ge=1, le=100, description="Number of results")
) -> List[Dict[str, Any]]:
"""Get creative capabilities for agent"""
try:
query = select(CreativeCapability).where(CreativeCapability.agent_id == agent_id)
if creative_domain:
query = query.where(CreativeCapability.creative_domain == creative_domain)
capabilities = session.exec(
query.order_by(CreativeCapability.originality_score.desc()).limit(limit)
).all()
return [
{
"capability_id": cap.capability_id,
"agent_id": cap.agent_id,
"creative_domain": cap.creative_domain,
"capability_type": cap.capability_type,
"originality_score": cap.originality_score,
"novelty_score": cap.novelty_score,
"aesthetic_quality": cap.aesthetic_quality,
"coherence_score": cap.coherence_score,
"generation_models": cap.generation_models,
"style_variety": cap.style_variety,
"output_quality": cap.output_quality,
"creative_learning_rate": cap.creative_learning_rate,
"style_adaptation": cap.style_adaptation,
"cross_domain_transfer": cap.cross_domain_transfer,
"creative_specializations": cap.creative_specializations,
"tool_proficiency": cap.tool_proficiency,
"domain_knowledge": cap.domain_knowledge,
"creations_generated": cap.creations_generated,
"user_ratings": cap.user_ratings,
"expert_evaluations": cap.expert_evaluations,
"status": cap.status,
"certification_level": cap.certification_level,
"created_at": cap.created_at.isoformat(),
"last_evaluation": cap.last_evaluation.isoformat() if cap.last_evaluation else None
}
for cap in capabilities
]
except Exception as e:
logger.error(f"Error getting creative capabilities for agent {agent_id}: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/analytics/fusion-performance")
async def get_fusion_performance_analytics(
session: SessionDep,
agent_ids: Optional[List[str]] = Query(default=[], description="List of agent IDs"),
fusion_type: Optional[str] = Query(default=None, description="Filter by fusion type"),
period: str = Query(default="7d", description="Time period")
) -> Dict[str, Any]:
"""Get fusion performance analytics"""
try:
query = select(FusionModel)
if fusion_type:
query = query.where(FusionModel.fusion_type == fusion_type)
models = session.exec(query).all()
# Filter by agent IDs if provided (by checking base models)
if agent_ids:
filtered_models = []
for model in models:
# Check if any base model belongs to specified agents
if any(agent_id in str(base_model) for base_model in model.base_models for agent_id in agent_ids):
filtered_models.append(model)
models = filtered_models
# Calculate analytics
total_models = len(models)
ready_models = len([m for m in models if m.status == "ready"])
if models:
avg_synergy = sum(m.synergy_score for m in models) / len(models)
avg_robustness = sum(m.robustness_score for m in models) / len(models)
# Performance metrics
performance_metrics = {}
for model in models:
if model.fusion_performance:
for metric, value in model.fusion_performance.items():
if metric not in performance_metrics:
performance_metrics[metric] = []
performance_metrics[metric].append(value)
avg_performance = {}
for metric, values in performance_metrics.items():
avg_performance[metric] = sum(values) / len(values)
# Fusion strategy distribution
strategy_distribution = {}
for model in models:
strategy = model.fusion_strategy
strategy_distribution[strategy] = strategy_distribution.get(strategy, 0) + 1
else:
avg_synergy = 0.0
avg_robustness = 0.0
avg_performance = {}
strategy_distribution = {}
return {
"period": period,
"total_models": total_models,
"ready_models": ready_models,
"readiness_rate": ready_models / total_models if total_models > 0 else 0.0,
"average_synergy_score": avg_synergy,
"average_robustness_score": avg_robustness,
"average_performance": avg_performance,
"strategy_distribution": strategy_distribution,
"top_performing_models": sorted(
[
{
"fusion_id": model.fusion_id,
"model_name": model.model_name,
"synergy_score": model.synergy_score,
"robustness_score": model.robustness_score,
"deployment_count": model.deployment_count
}
for model in models
],
key=lambda x: x["synergy_score"],
reverse=True
)[:10]
}
except Exception as e:
logger.error(f"Error getting fusion performance analytics: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/analytics/rl-performance")
async def get_rl_performance_analytics(
    session: SessionDep,
    agent_ids: Optional[List[str]] = Query(default=[], description="List of agent IDs"),
    algorithm: Optional[str] = Query(default=None, description="Filter by algorithm"),
    environment_type: Optional[str] = Query(default=None, description="Filter by environment type"),
    period: str = Query(default="7d", description="Time period")
) -> Dict[str, Any]:
    """Aggregate RL training analytics across stored RL configurations.

    Optionally filters by agent IDs, algorithm, and environment type, then
    reports readiness, averaged recent performance, distributions, and the
    top-10 agents by final performance.

    NOTE(review): `period` is echoed back in the response but never applied
    as a time filter to the query — confirm whether that is intentional.
    """
    try:
        query = select(ReinforcementLearningConfig)
        if agent_ids:
            query = query.where(ReinforcementLearningConfig.agent_id.in_(agent_ids))
        if algorithm:
            query = query.where(ReinforcementLearningConfig.algorithm == algorithm)
        if environment_type:
            query = query.where(ReinforcementLearningConfig.environment_type == environment_type)
        configs = session.exec(query).all()
        # Calculate analytics
        total_configs = len(configs)
        ready_configs = len([c for c in configs if c.status == "ready"])
        if configs:
            # Algorithm distribution (algorithm name -> config count)
            algorithm_distribution = {}
            for config in configs:
                alg = config.algorithm
                algorithm_distribution[alg] = algorithm_distribution.get(alg, 0) + 1
            # Environment distribution (environment type -> config count)
            environment_distribution = {}
            for config in configs:
                env = config.environment_type
                environment_distribution[env] = environment_distribution.get(env, 0) + 1
            # Performance metrics: each "final" value is the mean of the last
            # 10 recorded entries of the corresponding history list.
            final_performances = []
            success_rates = []
            convergence_episodes = []
            for config in configs:
                if config.reward_history:
                    final_performances.append(np.mean(config.reward_history[-10:]))
                if config.success_rate_history:
                    success_rates.append(np.mean(config.success_rate_history[-10:]))
                if config.convergence_episode:
                    convergence_episodes.append(config.convergence_episode)
            # Guard against empty lists so np.mean never sees zero elements.
            avg_performance = np.mean(final_performances) if final_performances else 0.0
            avg_success_rate = np.mean(success_rates) if success_rates else 0.0
            avg_convergence = np.mean(convergence_episodes) if convergence_episodes else 0.0
        else:
            # No matching configs: return zeroed aggregates.
            algorithm_distribution = {}
            environment_distribution = {}
            avg_performance = 0.0
            avg_success_rate = 0.0
            avg_convergence = 0.0
        return {
            "period": period,
            # Distinct agents, since one agent can own several configs.
            "total_agents": len(set(c.agent_id for c in configs)),
            "total_configs": total_configs,
            "ready_configs": ready_configs,
            "readiness_rate": ready_configs / total_configs if total_configs > 0 else 0.0,
            "average_performance": avg_performance,
            "average_success_rate": avg_success_rate,
            "average_convergence_episode": avg_convergence,
            "algorithm_distribution": algorithm_distribution,
            "environment_distribution": environment_distribution,
            # All configs ranked by mean of last 10 rewards, best first.
            "top_performing_agents": sorted(
                [
                    {
                        "agent_id": config.agent_id,
                        "algorithm": config.algorithm,
                        "environment_type": config.environment_type,
                        "final_performance": np.mean(config.reward_history[-10:]) if config.reward_history else 0.0,
                        "convergence_episode": config.convergence_episode,
                        "deployment_count": config.deployment_count
                    }
                    for config in configs
                ],
                key=lambda x: x["final_performance"],
                reverse=True
            )[:10]
        }
    except Exception as e:
        logger.error(f"Error getting RL performance analytics: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/health")
async def health_check() -> Dict[str, Any]:
    """Report liveness of the multi-modal and RL service components."""
    component_names = (
        "multi_modal_fusion_engine",
        "advanced_rl_engine",
        "marketplace_strategy_optimizer",
        "cross_domain_capability_integrator",
    )
    return {
        "status": "healthy",
        "timestamp": datetime.utcnow().isoformat(),
        "version": "1.0.0",
        # Static snapshot: every component is reported as operational.
        "services": {name: "operational" for name in component_names},
    }

View File

@@ -0,0 +1,524 @@
"""
Reputation Management API Endpoints
REST API for agent reputation, trust scores, and economic profiles
"""
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query
from pydantic import BaseModel, Field
import logging
from ..storage import SessionDep
from ..services.reputation_service import ReputationService
from ..domain.reputation import (
AgentReputation, CommunityFeedback, ReputationLevel,
TrustScoreCategory
)
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/v1/reputation", tags=["reputation"])
# Pydantic models for API requests/responses
class ReputationProfileResponse(BaseModel):
    """Response model for reputation profile.

    Aggregates identity, trust, job-history, and community data for one
    agent, plus its most recent reputation events and feedback entries.
    """
    agent_id: str
    # Composite trust metrics
    trust_score: float
    reputation_level: str
    performance_rating: float
    reliability_score: float
    community_rating: float
    # Economic activity
    total_earnings: float
    transaction_count: int
    # Job history
    success_rate: float
    jobs_completed: int
    jobs_failed: int
    average_response_time: float
    dispute_count: int
    # Profile metadata
    certifications: List[str]
    specialization_tags: List[str]
    geographic_region: str
    last_activity: str
    # Recent history (shape defined by the reputation service)
    recent_events: List[Dict[str, Any]]
    recent_feedback: List[Dict[str, Any]]
class FeedbackRequest(BaseModel):
    """Request model for community feedback submitted by a reviewer."""
    reviewer_id: str
    # Keyed ratings; expected keys per the description below.
    ratings: Dict[str, float] = Field(..., description="Overall, performance, communication, reliability, value ratings")
    feedback_text: str = Field(default="", max_length=1000)
    tags: List[str] = Field(default_factory=list)
class FeedbackResponse(BaseModel):
    """Response model for a stored community-feedback record."""
    id: str
    agent_id: str
    reviewer_id: str
    # Individual rating dimensions as persisted
    overall_rating: float
    performance_rating: float
    communication_rating: float
    reliability_rating: float
    value_rating: float
    feedback_text: str
    feedback_tags: List[str]
    # ISO-8601 timestamp string
    created_at: str
    # e.g. "approved" — only approved feedback is listed publicly
    moderation_status: str
class JobCompletionRequest(BaseModel):
    """Request model for recording a finished job against an agent."""
    agent_id: str
    job_id: str
    # True when the job completed successfully; drives success-rate updates
    success: bool
    response_time: float = Field(..., gt=0, description="Response time in milliseconds")
    earnings: float = Field(..., ge=0, description="Earnings in AITBC")
class TrustScoreResponse(BaseModel):
    """Response model for trust score breakdown.

    Exposes the composite score alongside each component used to derive it.
    """
    agent_id: str
    composite_score: float
    # Component scores feeding the composite
    performance_score: float
    reliability_score: float
    community_score: float
    security_score: float
    economic_score: float
    reputation_level: str
    # ISO-8601 timestamp of when this breakdown was computed
    calculated_at: str
class LeaderboardEntry(BaseModel):
    """One ranked row of the reputation leaderboard."""
    # 1-based position within the requested ranking
    rank: int
    agent_id: str
    trust_score: float
    reputation_level: str
    performance_rating: float
    reliability_score: float
    community_rating: float
    total_earnings: float
    transaction_count: int
    geographic_region: str
    specialization_tags: List[str]
class ReputationMetricsResponse(BaseModel):
    """System-wide reputation metrics across all agents."""
    total_agents: int
    average_trust_score: float
    # Reputation level name -> number of agents at that level
    level_distribution: Dict[str, int]
    # Up to ten {"region": ..., "count": ...} entries, most common first
    top_regions: List[Dict[str, Any]]
    # e.g. events_last_24h and active_agents counters
    recent_activity: Dict[str, Any]
# API Endpoints
@router.get("/profile/{agent_id}", response_model=ReputationProfileResponse)
async def get_reputation_profile(
    agent_id: str,
    session: SessionDep
) -> ReputationProfileResponse:
    """Get comprehensive reputation profile for an agent.

    Returns 404 when the reputation service reports no profile, and 500 on
    unexpected failures.
    """
    reputation_service = ReputationService(session)
    try:
        profile_data = await reputation_service.get_reputation_summary(agent_id)
        if "error" in profile_data:
            raise HTTPException(status_code=404, detail=profile_data["error"])
        return ReputationProfileResponse(**profile_data)
    except HTTPException:
        # Fix: re-raise as-is — the generic handler below used to swallow the
        # 404 raised above and convert it into a 500.
        raise
    except Exception as e:
        logger.error(f"Error getting reputation profile for {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/profile/{agent_id}")
async def create_reputation_profile(
    agent_id: str,
    session: SessionDep
) -> Dict[str, Any]:
    """Create a new reputation profile for an agent and echo its key fields."""
    service = ReputationService(session)
    try:
        created = await service.create_reputation_profile(agent_id)
        payload = {
            "message": "Reputation profile created successfully",
            "agent_id": created.agent_id,
            "trust_score": created.trust_score,
            "reputation_level": created.reputation_level.value,
            "created_at": created.created_at.isoformat(),
        }
        return payload
    except Exception as exc:
        logger.error(f"Error creating reputation profile for {agent_id}: {str(exc)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/feedback/{agent_id}", response_model=FeedbackResponse)
async def add_community_feedback(
    agent_id: str,
    feedback_request: FeedbackRequest,
    session: SessionDep
) -> FeedbackResponse:
    """Add community feedback for an agent.

    Persists the feedback via the reputation service and returns the stored
    record, including its moderation status. Returns 500 on any failure.
    """
    reputation_service = ReputationService(session)
    try:
        feedback = await reputation_service.add_community_feedback(
            agent_id=agent_id,
            reviewer_id=feedback_request.reviewer_id,
            ratings=feedback_request.ratings,
            feedback_text=feedback_request.feedback_text,
            tags=feedback_request.tags
        )
        # Verbatim field-by-field mapping from the persisted entity to the
        # response model; timestamps are serialized to ISO-8601 strings.
        return FeedbackResponse(
            id=feedback.id,
            agent_id=feedback.agent_id,
            reviewer_id=feedback.reviewer_id,
            overall_rating=feedback.overall_rating,
            performance_rating=feedback.performance_rating,
            communication_rating=feedback.communication_rating,
            reliability_rating=feedback.reliability_rating,
            value_rating=feedback.value_rating,
            feedback_text=feedback.feedback_text,
            feedback_tags=feedback.feedback_tags,
            created_at=feedback.created_at.isoformat(),
            moderation_status=feedback.moderation_status
        )
    except Exception as e:
        logger.error(f"Error adding feedback for agent {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/job-completion")
async def record_job_completion(
    job_request: JobCompletionRequest,
    session: SessionDep
) -> Dict[str, Any]:
    """Record job completion and update reputation.

    Delegates to the reputation service, which recomputes the agent's trust
    score, success rate, and earnings; the updated values are echoed back.
    """
    reputation_service = ReputationService(session)
    try:
        reputation = await reputation_service.record_job_completion(
            agent_id=job_request.agent_id,
            job_id=job_request.job_id,
            success=job_request.success,
            response_time=job_request.response_time,
            earnings=job_request.earnings
        )
        return {
            "message": "Job completion recorded successfully",
            "agent_id": reputation.agent_id,
            # Post-update values, not the ones submitted in the request.
            "new_trust_score": reputation.trust_score,
            "reputation_level": reputation.reputation_level.value,
            "jobs_completed": reputation.jobs_completed,
            "success_rate": reputation.success_rate,
            "total_earnings": reputation.total_earnings
        }
    except Exception as e:
        logger.error(f"Error recording job completion: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/trust-score/{agent_id}", response_model=TrustScoreResponse)
async def get_trust_score_breakdown(
    agent_id: str,
    session: SessionDep
) -> TrustScoreResponse:
    """Get detailed trust score breakdown for an agent.

    Recomputes each component score and the composite on the fly using the
    reputation service's calculator; nothing is persisted here.
    """
    reputation_service = ReputationService(session)
    calculator = reputation_service.calculator
    try:
        # Calculate individual components
        performance_score = calculator.calculate_performance_score(agent_id, session)
        reliability_score = calculator.calculate_reliability_score(agent_id, session)
        community_score = calculator.calculate_community_score(agent_id, session)
        security_score = calculator.calculate_security_score(agent_id, session)
        economic_score = calculator.calculate_economic_score(agent_id, session)
        # Composite score and the level it maps to
        composite_score = calculator.calculate_composite_trust_score(agent_id, session)
        reputation_level = calculator.determine_reputation_level(composite_score)
        return TrustScoreResponse(
            agent_id=agent_id,
            composite_score=composite_score,
            performance_score=performance_score,
            reliability_score=reliability_score,
            community_score=community_score,
            security_score=security_score,
            economic_score=economic_score,
            reputation_level=reputation_level.value,
            calculated_at=datetime.utcnow().isoformat()
        )
    except Exception as e:
        logger.error(f"Error getting trust score breakdown for {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/leaderboard", response_model=List[LeaderboardEntry])
async def get_reputation_leaderboard(
    session: SessionDep,
    category: str = Query(default="trust_score", description="Category to rank by"),
    limit: int = Query(default=50, ge=1, le=100, description="Number of results"),
    region: Optional[str] = Query(default=None, description="Filter by region"),
) -> List[LeaderboardEntry]:
    """Get reputation leaderboard.

    Fix: `session` moved ahead of the defaulted query parameters — a
    non-default parameter after defaulted ones is a SyntaxError. FastAPI
    resolves dependencies by annotation, so the HTTP interface is unchanged.
    """
    reputation_service = ReputationService(session)
    try:
        leaderboard_data = await reputation_service.get_leaderboard(
            category=category,
            limit=limit,
            region=region
        )
        return [LeaderboardEntry(**entry) for entry in leaderboard_data]
    except Exception as e:
        logger.error(f"Error getting leaderboard: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/metrics", response_model=ReputationMetricsResponse)
async def get_reputation_metrics(
    session: SessionDep
) -> ReputationMetricsResponse:
    """Get overall reputation system metrics.

    Reports agent counts, mean trust score, level distribution, the ten most
    common regions, and activity over the trailing 24 hours.

    NOTE(review): this handler uses `select`, `func`, and `ReputationEvent`,
    none of which appear in this module's visible imports — confirm the
    module header imports them.
    """
    try:
        reputations = session.exec(
            select(AgentReputation)
        ).all()
        if not reputations:
            # Empty system: return a well-formed zeroed payload.
            return ReputationMetricsResponse(
                total_agents=0,
                average_trust_score=0.0,
                level_distribution={},
                top_regions=[],
                recent_activity={}
            )
        total_agents = len(reputations)
        average_trust_score = sum(r.trust_score for r in reputations) / total_agents
        # Reputation level distribution (enum value -> agent count)
        level_counts = {}
        for reputation in reputations:
            level = reputation.reputation_level.value
            level_counts[level] = level_counts.get(level, 0) + 1
        # Top-10 geographic regions by agent count
        region_counts = {}
        for reputation in reputations:
            region = reputation.geographic_region or "Unknown"
            region_counts[region] = region_counts.get(region, 0) + 1
        top_regions = [
            {"region": region, "count": count}
            for region, count in sorted(region_counts.items(), key=lambda x: x[1], reverse=True)[:10]
        ]
        # Recent activity (last 24 hours)
        recent_cutoff = datetime.utcnow() - timedelta(days=1)
        recent_events = session.exec(
            select(func.count(ReputationEvent.id)).where(
                ReputationEvent.occurred_at >= recent_cutoff
            )
        ).first()
        # Fix: .first() on a count() select yields a bare int under SQLModel
        # but a 1-tuple Row under plain SQLAlchemy. The previous
        # `recent_events[0]` raised TypeError on the int path and surfaced as
        # a 500; handle both shapes explicitly.
        if recent_events is None:
            events_last_24h = 0
        elif isinstance(recent_events, int):
            events_last_24h = recent_events
        else:
            events_last_24h = recent_events[0]
        recent_activity = {
            "events_last_24h": events_last_24h,
            "active_agents": len([
                r for r in reputations
                if r.last_activity and r.last_activity >= recent_cutoff
            ])
        }
        return ReputationMetricsResponse(
            total_agents=total_agents,
            average_trust_score=average_trust_score,
            level_distribution=level_counts,
            top_regions=top_regions,
            recent_activity=recent_activity
        )
    except Exception as e:
        logger.error(f"Error getting reputation metrics: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/feedback/{agent_id}")
async def get_agent_feedback(
    agent_id: str,
    session: SessionDep,
    limit: int = Query(default=10, ge=1, le=50),
) -> List[FeedbackResponse]:
    """Get approved community feedback for an agent, newest first.

    Fix: `session` moved ahead of the defaulted `limit` parameter — a
    non-default parameter after a defaulted one is a SyntaxError. FastAPI
    resolves dependencies by annotation, so the HTTP interface is unchanged.

    NOTE(review): relies on `and_` being importable at module scope; it is
    not in this module's visible imports — confirm the header imports it.
    """
    try:
        feedbacks = session.exec(
            select(CommunityFeedback)
            .where(
                and_(
                    CommunityFeedback.agent_id == agent_id,
                    # Only moderated-and-approved feedback is public.
                    CommunityFeedback.moderation_status == "approved"
                )
            )
            .order_by(CommunityFeedback.created_at.desc())
            .limit(limit)
        ).all()
        return [
            FeedbackResponse(
                id=feedback.id,
                agent_id=feedback.agent_id,
                reviewer_id=feedback.reviewer_id,
                overall_rating=feedback.overall_rating,
                performance_rating=feedback.performance_rating,
                communication_rating=feedback.communication_rating,
                reliability_rating=feedback.reliability_rating,
                value_rating=feedback.value_rating,
                feedback_text=feedback.feedback_text,
                feedback_tags=feedback.feedback_tags,
                created_at=feedback.created_at.isoformat(),
                moderation_status=feedback.moderation_status
            )
            for feedback in feedbacks
        ]
    except Exception as e:
        logger.error(f"Error getting feedback for agent {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/events/{agent_id}")
async def get_reputation_events(
    agent_id: str,
    session: SessionDep,
    limit: int = Query(default=20, ge=1, le=100),
) -> List[Dict[str, Any]]:
    """Get reputation change events for an agent, newest first.

    Fix: `session` moved ahead of the defaulted `limit` parameter — a
    non-default parameter after a defaulted one is a SyntaxError. FastAPI
    resolves dependencies by annotation, so the HTTP interface is unchanged.
    """
    try:
        events = session.exec(
            select(ReputationEvent)
            .where(ReputationEvent.agent_id == agent_id)
            .order_by(ReputationEvent.occurred_at.desc())
            .limit(limit)
        ).all()
        return [
            {
                "id": event.id,
                "event_type": event.event_type,
                "event_subtype": event.event_subtype,
                "impact_score": event.impact_score,
                # Before/after snapshots of the trust score and level
                "trust_score_before": event.trust_score_before,
                "trust_score_after": event.trust_score_after,
                "reputation_level_before": event.reputation_level_before.value if event.reputation_level_before else None,
                "reputation_level_after": event.reputation_level_after.value if event.reputation_level_after else None,
                "occurred_at": event.occurred_at.isoformat(),
                "event_data": event.event_data
            }
            for event in events
        ]
    except Exception as e:
        logger.error(f"Error getting reputation events for {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.put("/profile/{agent_id}/specialization")
async def update_specialization(
    agent_id: str,
    specialization_tags: List[str],
    session: SessionDep
) -> Dict[str, Any]:
    """Replace the specialization tags on an agent's reputation profile."""
    try:
        record = session.exec(
            select(AgentReputation).where(AgentReputation.agent_id == agent_id)
        ).first()
        if record is None:
            raise HTTPException(status_code=404, detail="Reputation profile not found")
        # Overwrite tags wholesale and stamp the modification time.
        record.specialization_tags = specialization_tags
        record.updated_at = datetime.utcnow()
        session.commit()
        session.refresh(record)
        return {
            "message": "Specialization tags updated successfully",
            "agent_id": agent_id,
            "specialization_tags": record.specialization_tags,
            "updated_at": record.updated_at.isoformat(),
        }
    except HTTPException:
        raise
    except Exception as exc:
        logger.error(f"Error updating specialization for {agent_id}: {str(exc)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.put("/profile/{agent_id}/region")
async def update_region(
    agent_id: str,
    region: str,
    session: SessionDep
) -> Dict[str, Any]:
    """Set the geographic region on an agent's reputation profile."""
    try:
        record = session.exec(
            select(AgentReputation).where(AgentReputation.agent_id == agent_id)
        ).first()
        if record is None:
            raise HTTPException(status_code=404, detail="Reputation profile not found")
        # Store the new region and stamp the modification time.
        record.geographic_region = region
        record.updated_at = datetime.utcnow()
        session.commit()
        session.refresh(record)
        return {
            "message": "Geographic region updated successfully",
            "agent_id": agent_id,
            "geographic_region": record.geographic_region,
            "updated_at": record.updated_at.isoformat(),
        }
    except HTTPException:
        raise
    except Exception as exc:
        logger.error(f"Error updating region for {agent_id}: {str(exc)}")
        raise HTTPException(status_code=500, detail="Internal server error")

View File

@@ -0,0 +1,565 @@
"""
Reward System API Endpoints
REST API for agent rewards, incentives, and performance-based earnings
"""
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query
from pydantic import BaseModel, Field
import logging
from ..storage import SessionDep
from ..services.reward_service import RewardEngine
from ..domain.rewards import (
AgentRewardProfile, RewardTier, RewardType, RewardStatus
)
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/v1/rewards", tags=["rewards"])
# Pydantic models for API requests/responses
class RewardProfileResponse(BaseModel):
    """Response model for reward profile.

    Combines tier state, earnings tallies, streaks, and recent reward
    activity for a single agent.
    """
    agent_id: str
    # Tier state
    current_tier: str
    tier_progress: float
    # Earnings breakdown
    base_earnings: float
    bonus_earnings: float
    total_earnings: float
    lifetime_earnings: float
    rewards_distributed: int
    # Streaks and engagement metrics
    current_streak: int
    longest_streak: int
    performance_score: float
    loyalty_score: float
    referral_count: int
    community_contributions: int
    # ISO-8601 timestamp of the most recent reward, if any
    last_reward_date: Optional[str]
    # Recent history (shape defined by the reward engine)
    recent_calculations: List[Dict[str, Any]]
    recent_distributions: List[Dict[str, Any]]
class RewardRequest(BaseModel):
    """Request model for reward calculation and distribution."""
    agent_id: str
    reward_type: RewardType
    base_amount: float = Field(..., gt=0, description="Base reward amount in AITBC")
    performance_metrics: Dict[str, Any] = Field(..., description="Performance metrics for bonus calculation")
    # ISO-8601 string; defaults to "now" when omitted
    reference_date: Optional[str] = Field(default=None, description="Reference date for calculation")
class RewardResponse(BaseModel):
    """Response model for reward distribution."""
    # IDs of the persisted calculation and distribution records
    calculation_id: str
    distribution_id: str
    reward_amount: float
    reward_type: str
    # Multiplier applied for the agent's current tier
    tier_multiplier: float
    total_bonus: float
    status: str
class RewardAnalyticsResponse(BaseModel):
    """Response model for reward analytics over a reporting period."""
    # daily / weekly / monthly
    period_type: str
    start_date: str
    end_date: str
    total_rewards_distributed: float
    total_agents_rewarded: int
    average_reward_per_agent: float
    # Tier name -> number of agents in that tier
    tier_distribution: Dict[str, int]
    total_distributions: int
class TierProgressResponse(BaseModel):
    """Response model for tier progress."""
    agent_id: str
    current_tier: str
    # None when the agent is already at the top tier
    next_tier: Optional[str]
    tier_progress: float
    trust_score: float
    # Requirement name -> whether the agent currently satisfies it
    requirements_met: Dict[str, bool]
    # Benefit set of the current tier (job limits, boosts, discounts, support)
    benefits: Dict[str, Any]
class BatchProcessResponse(BaseModel):
    """Response model for batch processing of pending reward distributions."""
    # processed + failed == total
    processed: int
    failed: int
    total: int
class MilestoneResponse(BaseModel):
    """Response model for milestone achievements."""
    id: str
    agent_id: str
    milestone_type: str
    milestone_name: str
    # Progress toward the target value, plus a derived percentage
    target_value: float
    current_value: float
    progress_percentage: float
    # AITBC amount paid out when the milestone is claimed
    reward_amount: float
    is_completed: bool
    is_claimed: bool
    # ISO-8601 timestamps; None until the respective event happens
    completed_at: Optional[str]
    claimed_at: Optional[str]
# API Endpoints
@router.get("/profile/{agent_id}", response_model=RewardProfileResponse)
async def get_reward_profile(
    agent_id: str,
    session: SessionDep
) -> RewardProfileResponse:
    """Get comprehensive reward profile for an agent.

    Returns 404 when the reward engine reports no profile, and 500 on
    unexpected failures.
    """
    reward_engine = RewardEngine(session)
    try:
        profile_data = await reward_engine.get_reward_summary(agent_id)
        if "error" in profile_data:
            raise HTTPException(status_code=404, detail=profile_data["error"])
        return RewardProfileResponse(**profile_data)
    except HTTPException:
        # Fix: re-raise as-is — the generic handler below used to swallow the
        # 404 raised above and convert it into a 500.
        raise
    except Exception as e:
        logger.error(f"Error getting reward profile for {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/profile/{agent_id}")
async def create_reward_profile(
    agent_id: str,
    session: SessionDep
) -> Dict[str, Any]:
    """Create a new reward profile for an agent and echo its key fields."""
    engine = RewardEngine(session)
    try:
        created = await engine.create_reward_profile(agent_id)
        payload = {
            "message": "Reward profile created successfully",
            "agent_id": created.agent_id,
            "current_tier": created.current_tier.value,
            "tier_progress": created.tier_progress,
            "created_at": created.created_at.isoformat(),
        }
        return payload
    except Exception as exc:
        logger.error(f"Error creating reward profile for {agent_id}: {str(exc)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/calculate-and-distribute", response_model=RewardResponse)
async def calculate_and_distribute_reward(
    reward_request: RewardRequest,
    session: SessionDep
) -> RewardResponse:
    """Calculate and distribute reward for an agent.

    Returns 400 for a malformed `reference_date` and 500 on unexpected
    failures in the reward engine.
    """
    reward_engine = RewardEngine(session)
    try:
        # Parse the optional reference date; a malformed client-supplied date
        # is a 400, not a 500 (fromisoformat raises ValueError).
        reference_date = None
        if reward_request.reference_date:
            try:
                reference_date = datetime.fromisoformat(reward_request.reference_date)
            except ValueError as exc:
                raise HTTPException(
                    status_code=400,
                    detail="Invalid reference_date; expected an ISO 8601 date string"
                ) from exc
        # Calculate and distribute reward
        result = await reward_engine.calculate_and_distribute_reward(
            agent_id=reward_request.agent_id,
            reward_type=reward_request.reward_type,
            base_amount=reward_request.base_amount,
            performance_metrics=reward_request.performance_metrics,
            reference_date=reference_date
        )
        return RewardResponse(
            calculation_id=result["calculation_id"],
            distribution_id=result["distribution_id"],
            reward_amount=result["reward_amount"],
            reward_type=result["reward_type"],
            tier_multiplier=result["tier_multiplier"],
            total_bonus=result["total_bonus"],
            status=result["status"]
        )
    except HTTPException:
        # Propagate deliberate HTTP errors instead of masking them as 500s.
        raise
    except Exception as e:
        logger.error(f"Error calculating and distributing reward: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/tier-progress/{agent_id}", response_model=TierProgressResponse)
async def get_tier_progress(
    agent_id: str,
    session: SessionDep
) -> TierProgressResponse:
    """Get tier progress information for an agent.

    Combines the agent's reward profile with its trust score (500.0 when no
    reputation profile exists) and reports the next tier in the ladder, the
    static requirement checks, and the benefit set of the current tier.

    Fix: removed the unused `RewardEngine(session)` instantiation — the
    handler reads the profile directly from the session.
    """
    try:
        profile = session.exec(
            select(AgentRewardProfile).where(AgentRewardProfile.agent_id == agent_id)
        ).first()
        if not profile:
            raise HTTPException(status_code=404, detail="Reward profile not found")
        # Trust score comes from the reputation subsystem when available.
        from ..domain.reputation import AgentReputation
        reputation = session.exec(
            select(AgentReputation).where(AgentReputation.agent_id == agent_id)
        ).first()
        trust_score = reputation.trust_score if reputation else 500.0
        # Tier ladder: Bronze -> Silver -> Gold -> Platinum -> Diamond (top).
        next_tier_ladder = {
            RewardTier.BRONZE: RewardTier.SILVER,
            RewardTier.SILVER: RewardTier.GOLD,
            RewardTier.GOLD: RewardTier.PLATINUM,
            RewardTier.PLATINUM: RewardTier.DIAMOND,
        }
        current_tier = profile.current_tier
        next_tier = next_tier_ladder.get(current_tier)
        # Static qualification checks (thresholds mirror the tier policy).
        requirements_met = {
            "minimum_trust_score": trust_score >= 400,
            "minimum_performance": profile.performance_score >= 3.0,
            "minimum_activity": profile.rewards_distributed >= 1,
            "minimum_earnings": profile.total_earnings >= 0.1
        }
        # Per-tier overrides applied on top of the Bronze baseline.
        tier_benefit_overrides = {
            RewardTier.SILVER: {
                "max_concurrent_jobs": 2,
                "priority_boost": 1.1,
                "fee_discount": 5.0,
                "support_level": "priority"
            },
            RewardTier.GOLD: {
                "max_concurrent_jobs": 3,
                "priority_boost": 1.2,
                "fee_discount": 10.0,
                "support_level": "priority"
            },
            RewardTier.PLATINUM: {
                "max_concurrent_jobs": 5,
                "priority_boost": 1.5,
                "fee_discount": 15.0,
                "support_level": "premium"
            },
            RewardTier.DIAMOND: {
                "max_concurrent_jobs": 10,
                "priority_boost": 2.0,
                "fee_discount": 20.0,
                "support_level": "premium"
            },
        }
        tier_benefits = {
            "max_concurrent_jobs": 1,
            "priority_boost": 1.0,
            "fee_discount": 0.0,
            "support_level": "basic"
        }
        tier_benefits.update(tier_benefit_overrides.get(current_tier, {}))
        return TierProgressResponse(
            agent_id=agent_id,
            current_tier=current_tier.value,
            next_tier=next_tier.value if next_tier else None,
            tier_progress=profile.tier_progress,
            trust_score=trust_score,
            requirements_met=requirements_met,
            benefits=tier_benefits
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting tier progress for {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/batch-process", response_model=BatchProcessResponse)
async def batch_process_pending_rewards(
    session: SessionDep,
    limit: int = Query(default=100, ge=1, le=1000, description="Maximum number of rewards to process"),
) -> BatchProcessResponse:
    """Process pending reward distributions in batch.

    Fix: `session` moved ahead of the defaulted `limit` parameter — a
    non-default parameter after a defaulted one is a SyntaxError. FastAPI
    resolves dependencies by annotation, so the HTTP interface is unchanged.
    """
    reward_engine = RewardEngine(session)
    try:
        result = await reward_engine.batch_process_pending_rewards(limit)
        return BatchProcessResponse(
            processed=result["processed"],
            failed=result["failed"],
            total=result["total"]
        )
    except Exception as e:
        logger.error(f"Error batch processing rewards: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/analytics", response_model=RewardAnalyticsResponse)
async def get_reward_analytics(
    session: SessionDep,
    period_type: str = Query(default="daily", description="Period type: daily, weekly, monthly"),
    start_date: Optional[str] = Query(default=None, description="Start date (ISO format)"),
    end_date: Optional[str] = Query(default=None, description="End date (ISO format)"),
) -> RewardAnalyticsResponse:
    """Get reward system analytics.

    Fix: `session` moved ahead of the defaulted query parameters — a
    non-default parameter after defaulted ones is a SyntaxError. FastAPI
    resolves dependencies by annotation, so the HTTP interface is unchanged.
    Also returns 400 (not 500) for malformed date strings.
    """
    reward_engine = RewardEngine(session)
    try:
        # Parse optional bounds; fromisoformat raises ValueError on bad input.
        start_dt = None
        end_dt = None
        try:
            if start_date:
                start_dt = datetime.fromisoformat(start_date)
            if end_date:
                end_dt = datetime.fromisoformat(end_date)
        except ValueError as exc:
            raise HTTPException(
                status_code=400,
                detail="Invalid start_date or end_date; expected ISO 8601 date strings"
            ) from exc
        analytics_data = await reward_engine.get_reward_analytics(
            period_type=period_type,
            start_date=start_dt,
            end_date=end_dt
        )
        return RewardAnalyticsResponse(**analytics_data)
    except HTTPException:
        # Propagate deliberate HTTP errors instead of masking them as 500s.
        raise
    except Exception as e:
        logger.error(f"Error getting reward analytics: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/leaderboard")
async def get_reward_leaderboard(
    session: SessionDep,
    tier: Optional[str] = Query(default=None, description="Filter by tier"),
    period: str = Query(default="weekly", description="Period: daily, weekly, monthly"),
    limit: int = Query(default=50, ge=1, le=100, description="Number of results"),
) -> List[Dict[str, Any]]:
    """Get reward leaderboard ranked by total earnings in the period.

    Fix: `session` moved ahead of the defaulted query parameters — a
    non-default parameter after defaulted ones is a SyntaxError. FastAPI
    resolves dependencies by annotation, so the HTTP interface is unchanged.
    """
    try:
        # Map the period name to a lookback window; unknown values fall back
        # to weekly, matching the previous if/elif chain.
        period_days = {"daily": 1, "weekly": 7, "monthly": 30}
        start_date = datetime.utcnow() - timedelta(days=period_days.get(period, 7))
        # Only agents active within the window appear on the board.
        query = select(AgentRewardProfile).where(
            AgentRewardProfile.last_activity >= start_date
        )
        if tier:
            query = query.where(AgentRewardProfile.current_tier == tier)
        profiles = session.exec(
            query.order_by(AgentRewardProfile.total_earnings.desc()).limit(limit)
        ).all()
        leaderboard = []
        for rank, profile in enumerate(profiles, 1):
            leaderboard.append({
                "rank": rank,
                "agent_id": profile.agent_id,
                "current_tier": profile.current_tier.value,
                "total_earnings": profile.total_earnings,
                "lifetime_earnings": profile.lifetime_earnings,
                "rewards_distributed": profile.rewards_distributed,
                "current_streak": profile.current_streak,
                "performance_score": profile.performance_score
            })
        return leaderboard
    except Exception as e:
        logger.error(f"Error getting reward leaderboard: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/tiers")
async def get_reward_tiers(
    session: SessionDep
) -> List[Dict[str, Any]]:
    """List all active reward-tier configurations, lowest threshold first."""
    try:
        from ..domain.rewards import RewardTierConfig
        active_configs = session.exec(
            select(RewardTierConfig).where(RewardTierConfig.is_active == True)
        ).all()
        tiers = [
            {
                "tier": cfg.tier.value,
                "min_trust_score": cfg.min_trust_score,
                "base_multiplier": cfg.base_multiplier,
                "performance_bonus_multiplier": cfg.performance_bonus_multiplier,
                "max_concurrent_jobs": cfg.max_concurrent_jobs,
                "priority_boost": cfg.priority_boost,
                "fee_discount": cfg.fee_discount,
                "support_level": cfg.support_level,
                "tier_requirements": cfg.tier_requirements,
                "tier_benefits": cfg.tier_benefits,
            }
            for cfg in active_configs
        ]
        # Ascending by entry threshold so clients see the tier ladder in order.
        tiers.sort(key=lambda entry: entry["min_trust_score"])
        return tiers
    except Exception as exc:
        logger.error(f"Error getting reward tiers: {str(exc)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/milestones/{agent_id}")
async def get_agent_milestones(
    agent_id: str,
    session: SessionDep,
    include_completed: bool = Query(default=True, description="Include completed milestones"),
) -> List[MilestoneResponse]:
    """Get milestones for an agent, newest first.

    Fix: `session` moved ahead of the defaulted `include_completed`
    parameter — a non-default parameter after a defaulted one is a
    SyntaxError. FastAPI resolves dependencies by annotation, so the HTTP
    interface is unchanged.
    """
    try:
        from ..domain.rewards import RewardMilestone
        query = select(RewardMilestone).where(RewardMilestone.agent_id == agent_id)
        if not include_completed:
            query = query.where(RewardMilestone.is_completed == False)
        milestones = session.exec(
            query.order_by(RewardMilestone.created_at.desc())
        ).all()
        return [
            MilestoneResponse(
                id=milestone.id,
                agent_id=milestone.agent_id,
                milestone_type=milestone.milestone_type,
                milestone_name=milestone.milestone_name,
                target_value=milestone.target_value,
                current_value=milestone.current_value,
                progress_percentage=milestone.progress_percentage,
                reward_amount=milestone.reward_amount,
                is_completed=milestone.is_completed,
                is_claimed=milestone.is_claimed,
                completed_at=milestone.completed_at.isoformat() if milestone.completed_at else None,
                claimed_at=milestone.claimed_at.isoformat() if milestone.claimed_at else None
            )
            for milestone in milestones
        ]
    except Exception as e:
        logger.error(f"Error getting milestones for {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/distributions/{agent_id}")
async def get_reward_distributions(
    agent_id: str,
    session: SessionDep,
    limit: int = Query(default=20, ge=1, le=100),
    status: Optional[str] = Query(default=None, description="Filter by status"),
) -> List[Dict[str, Any]]:
    """Get reward distribution history for an agent, newest first.

    Fix: `session` moved ahead of the defaulted query parameters — a
    non-default parameter after defaulted ones is a SyntaxError. FastAPI
    resolves dependencies by annotation, so the HTTP interface is unchanged.
    """
    try:
        from ..domain.rewards import RewardDistribution
        query = select(RewardDistribution).where(RewardDistribution.agent_id == agent_id)
        if status:
            query = query.where(RewardDistribution.status == status)
        distributions = session.exec(
            query.order_by(RewardDistribution.created_at.desc()).limit(limit)
        ).all()
        return [
            {
                "id": distribution.id,
                "reward_amount": distribution.reward_amount,
                "reward_type": distribution.reward_type.value,
                "status": distribution.status.value,
                "distribution_method": distribution.distribution_method,
                # On-chain / payment linkage, when present
                "transaction_id": distribution.transaction_id,
                "transaction_status": distribution.transaction_status,
                "created_at": distribution.created_at.isoformat(),
                "processed_at": distribution.processed_at.isoformat() if distribution.processed_at else None,
                "error_message": distribution.error_message
            }
            for distribution in distributions
        ]
    except Exception as e:
        logger.error(f"Error getting distributions for {agent_id}: {str(e)}")
        raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/simulate-reward")
async def simulate_reward_calculation(
reward_request: RewardRequest,
session: SessionDep
) -> Dict[str, Any]:
"""Simulate reward calculation without distributing"""
reward_engine = RewardEngine(session)
try:
# Ensure reward profile exists
await reward_engine.create_reward_profile(reward_request.agent_id)
# Calculate reward only (no distribution)
reward_calculation = reward_engine.calculator.calculate_total_reward(
reward_request.agent_id,
reward_request.base_amount,
reward_request.performance_metrics,
session
)
return {
"agent_id": reward_request.agent_id,
"reward_type": reward_request.reward_type.value,
"base_amount": reward_request.base_amount,
"tier_multiplier": reward_calculation["tier_multiplier"],
"performance_bonus": reward_calculation["performance_bonus"],
"loyalty_bonus": reward_calculation["loyalty_bonus"],
"referral_bonus": reward_calculation["referral_bonus"],
"milestone_bonus": reward_calculation["milestone_bonus"],
"effective_multiplier": reward_calculation["effective_multiplier"],
"total_reward": reward_calculation["total_reward"],
"trust_score": reward_calculation["trust_score"],
"simulation": True
}
except Exception as e:
logger.error(f"Error simulating reward calculation: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")

View File

@@ -0,0 +1,722 @@
"""
P2P Trading Protocol API Endpoints
REST API for agent-to-agent trading, matching, negotiation, and settlement
"""
import logging
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional
from uuid import uuid4

from fastapi import APIRouter, Depends, HTTPException, Query
from pydantic import BaseModel, Field
from sqlmodel import or_, select

from ..storage import SessionDep
from ..services.trading_service import P2PTradingProtocol
from ..domain.trading import (
    TradeRequest, TradeMatch, TradeNegotiation, TradeAgreement, TradeSettlement,
    TradeStatus, TradeType, NegotiationStatus, SettlementType
)
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/v1/trading", tags=["trading"])
# Pydantic models for API requests/responses
class TradeRequestRequest(BaseModel):
    """Payload for creating a buy-side trade request."""
    buyer_agent_id: str  # agent submitting the request
    trade_type: TradeType
    title: str = Field(..., max_length=200)
    description: str = Field(default="", max_length=1000)
    requirements: Dict[str, Any] = Field(..., description="Trade requirements and specifications")
    budget_range: Dict[str, float] = Field(..., description="Budget range with min and max")
    # Scheduling window; timestamps arrive as ISO-8601 strings and are parsed server-side.
    start_time: Optional[str] = Field(default=None, description="Start time (ISO format)")
    end_time: Optional[str] = Field(default=None, description="End time (ISO format)")
    duration_hours: Optional[int] = Field(default=None, description="Duration in hours")
    urgency_level: str = Field(default="normal", description="urgency level")
    # Geographic preferences used by the matching engine.
    preferred_regions: List[str] = Field(default_factory=list, description="Preferred regions")
    excluded_regions: List[str] = Field(default_factory=list, description="Excluded regions")
    service_level_required: str = Field(default="standard", description="Service level required")
    tags: List[str] = Field(default_factory=list, description="Trade tags")
    expires_at: Optional[str] = Field(default=None, description="Expiration time (ISO format)")
class TradeRequestResponse(BaseModel):
    """Serialized view of a persisted trade request; timestamps are ISO-8601 strings."""
    request_id: str
    buyer_agent_id: str
    trade_type: str  # enum value, not the enum itself
    title: str
    description: str
    requirements: Dict[str, Any]
    budget_range: Dict[str, float]
    status: str
    # Matching statistics maintained by the trading protocol.
    match_count: int
    best_match_score: float
    created_at: str
    updated_at: str
    expires_at: Optional[str]  # None when the request never expires
class TradeMatchResponse(BaseModel):
    """Serialized buyer/seller match with its per-dimension compatibility scores."""
    match_id: str
    request_id: str
    buyer_agent_id: str
    seller_agent_id: str
    match_score: float  # overall score used for ranking
    confidence_level: float
    # Individual compatibility dimensions feeding the overall score.
    price_compatibility: float
    specification_compatibility: float
    timing_compatibility: float
    reputation_compatibility: float
    geographic_compatibility: float
    seller_offer: Dict[str, Any]
    proposed_terms: Dict[str, Any]
    status: str
    created_at: str
    expires_at: Optional[str]
class NegotiationRequest(BaseModel):
    """Payload for starting a negotiation on an existing match."""
    match_id: str
    initiator: str = Field(..., description="negotiation initiator: buyer or seller")
    strategy: str = Field(default="balanced", description="negotiation strategy")
class NegotiationResponse(BaseModel):
    """Serialized negotiation state; timestamps are ISO-8601 strings."""
    negotiation_id: str
    match_id: str
    buyer_agent_id: str
    seller_agent_id: str
    status: str
    negotiation_round: int  # current round counter
    current_terms: Dict[str, Any]
    negotiation_strategy: str
    auto_accept_threshold: float  # score above which terms auto-accept
    created_at: str
    started_at: Optional[str]  # None until the negotiation actually begins
    expires_at: Optional[str]
class AgreementResponse(BaseModel):
    """Serialized trade agreement reached after a successful negotiation."""
    agreement_id: str
    negotiation_id: str
    buyer_agent_id: str
    seller_agent_id: str
    trade_type: str
    title: str
    agreed_terms: Dict[str, Any]
    total_price: float
    settlement_type: str
    status: str
    created_at: str
    signed_at: str
    # Optional contract validity window.
    starts_at: Optional[str]
    ends_at: Optional[str]
class SettlementResponse(BaseModel):
    """Serialized settlement record for an agreement's payment lifecycle."""
    settlement_id: str
    agreement_id: str
    settlement_type: str
    total_amount: float
    currency: str
    payment_status: str
    transaction_id: Optional[str]  # None until a payment transaction exists
    platform_fee: float
    net_amount_seller: float  # total_amount minus fees owed to the seller
    status: str
    initiated_at: str
    processed_at: Optional[str]
    completed_at: Optional[str]
class TradingSummaryResponse(BaseModel):
    """Aggregate trading activity for one agent (both buy and sell side)."""
    agent_id: str
    # Lifetime activity counters.
    trade_requests: int
    trade_matches: int
    negotiations: int
    agreements: int
    success_rate: float
    average_match_score: float
    total_trade_volume: float
    recent_activity: Dict[str, Any]  # shape defined by the trading service
# API Endpoints
@router.post("/requests", response_model=TradeRequestResponse)
async def create_trade_request(
request_data: TradeRequestRequest,
session: SessionDep
) -> TradeRequestResponse:
"""Create a new trade request"""
trading_protocol = P2PTradingProtocol(session)
try:
# Parse optional datetime fields
start_time = None
end_time = None
expires_at = None
if request_data.start_time:
start_time = datetime.fromisoformat(request_data.start_time)
if request_data.end_time:
end_time = datetime.fromisoformat(request_data.end_time)
if request_data.expires_at:
expires_at = datetime.fromisoformat(request_data.expires_at)
# Create trade request
trade_request = await trading_protocol.create_trade_request(
buyer_agent_id=request_data.buyer_agent_id,
trade_type=request_data.trade_type,
title=request_data.title,
description=request_data.description,
requirements=request_data.requirements,
budget_range=request_data.budget_range,
start_time=start_time,
end_time=end_time,
duration_hours=request_data.duration_hours,
urgency_level=request_data.urgency_level,
preferred_regions=request_data.preferred_regions,
excluded_regions=request_data.excluded_regions,
service_level_required=request_data.service_level_required,
tags=request_data.tags,
expires_at=expires_at
)
return TradeRequestResponse(
request_id=trade_request.request_id,
buyer_agent_id=trade_request.buyer_agent_id,
trade_type=trade_request.trade_type.value,
title=trade_request.title,
description=trade_request.description,
requirements=trade_request.requirements,
budget_range=trade_request.budget_range,
status=trade_request.status.value,
match_count=trade_request.match_count,
best_match_score=trade_request.best_match_score,
created_at=trade_request.created_at.isoformat(),
updated_at=trade_request.updated_at.isoformat(),
expires_at=trade_request.expires_at.isoformat() if trade_request.expires_at else None
)
except Exception as e:
logger.error(f"Error creating trade request: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/requests/{request_id}", response_model=TradeRequestResponse)
async def get_trade_request(
request_id: str,
session: SessionDep
) -> TradeRequestResponse:
"""Get trade request details"""
try:
trade_request = session.exec(
select(TradeRequest).where(TradeRequest.request_id == request_id)
).first()
if not trade_request:
raise HTTPException(status_code=404, detail="Trade request not found")
return TradeRequestResponse(
request_id=trade_request.request_id,
buyer_agent_id=trade_request.buyer_agent_id,
trade_type=trade_request.trade_type.value,
title=trade_request.title,
description=trade_request.description,
requirements=trade_request.requirements,
budget_range=trade_request.budget_range,
status=trade_request.status.value,
match_count=trade_request.match_count,
best_match_score=trade_request.best_match_score,
created_at=trade_request.created_at.isoformat(),
updated_at=trade_request.updated_at.isoformat(),
expires_at=trade_request.expires_at.isoformat() if trade_request.expires_at else None
)
except HTTPException:
raise
except Exception as e:
logger.error(f"Error getting trade request {request_id}: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/requests/{request_id}/matches")
async def find_matches(
request_id: str,
session: SessionDep
) -> List[str]:
"""Find matching sellers for a trade request"""
trading_protocol = P2PTradingProtocol(session)
try:
matches = await trading_protocol.find_matches(request_id)
return matches
except ValueError as e:
raise HTTPException(status_code=404, detail=str(e))
except Exception as e:
logger.error(f"Error finding matches for request {request_id}: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/requests/{request_id}/matches")
async def get_trade_matches(
request_id: str,
session: SessionDep
) -> List[TradeMatchResponse]:
"""Get trade matches for a request"""
try:
matches = session.exec(
select(TradeMatch).where(TradeMatch.request_id == request_id)
.order_by(TradeMatch.match_score.desc())
).all()
return [
TradeMatchResponse(
match_id=match.match_id,
request_id=match.request_id,
buyer_agent_id=match.buyer_agent_id,
seller_agent_id=match.seller_agent_id,
match_score=match.match_score,
confidence_level=match.confidence_level,
price_compatibility=match.price_compatibility,
specification_compatibility=match.specification_compatibility,
timing_compatibility=match.timing_compatibility,
reputation_compatibility=match.reputation_compatibility,
geographic_compatibility=match.geographic_compatibility,
seller_offer=match.seller_offer,
proposed_terms=match.proposed_terms,
status=match.status.value,
created_at=match.created_at.isoformat(),
expires_at=match.expires_at.isoformat() if match.expires_at else None
)
for match in matches
]
except Exception as e:
logger.error(f"Error getting trade matches for request {request_id}: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/negotiations", response_model=NegotiationResponse)
async def initiate_negotiation(
negotiation_data: NegotiationRequest,
session: SessionDep
) -> NegotiationResponse:
"""Initiate negotiation between buyer and seller"""
trading_protocol = P2PTradingProtocol(session)
try:
negotiation = await trading_protocol.initiate_negotiation(
match_id=negotiation_data.match_id,
initiator=negotiation_data.initiator,
strategy=negotiation_data.strategy
)
return NegotiationResponse(
negotiation_id=negotiation.negotiation_id,
match_id=negotiation.match_id,
buyer_agent_id=negotiation.buyer_agent_id,
seller_agent_id=negotiation.seller_agent_id,
status=negotiation.status.value,
negotiation_round=negotiation.negotiation_round,
current_terms=negotiation.current_terms,
negotiation_strategy=negotiation.negotiation_strategy,
auto_accept_threshold=negotiation.auto_accept_threshold,
created_at=negotiation.created_at.isoformat(),
started_at=negotiation.started_at.isoformat() if negotiation.started_at else None,
expires_at=negotiation.expires_at.isoformat() if negotiation.expires_at else None
)
except ValueError as e:
raise HTTPException(status_code=404, detail=str(e))
except Exception as e:
logger.error(f"Error initiating negotiation: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/negotiations/{negotiation_id}", response_model=NegotiationResponse)
async def get_negotiation(
negotiation_id: str,
session: SessionDep
) -> NegotiationResponse:
"""Get negotiation details"""
try:
negotiation = session.exec(
select(TradeNegotiation).where(TradeNegotiation.negotiation_id == negotiation_id)
).first()
if not negotiation:
raise HTTPException(status_code=404, detail="Negotiation not found")
return NegotiationResponse(
negotiation_id=negotiation.negotiation_id,
match_id=negotiation.match_id,
buyer_agent_id=negotiation.buyer_agent_id,
seller_agent_id=negotiation.seller_agent_id,
status=negotiation.status.value,
negotiation_round=negotiation.negotiation_round,
current_terms=negotiation.current_terms,
negotiation_strategy=negotiation.negotiation_strategy,
auto_accept_threshold=negotiation.auto_accept_threshold,
created_at=negotiation.created_at.isoformat(),
started_at=negotiation.started_at.isoformat() if negotiation.started_at else None,
expires_at=negotiation.expires_at.isoformat() if negotiation.expires_at else None
)
except HTTPException:
raise
except Exception as e:
logger.error(f"Error getting negotiation {negotiation_id}: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/matches/{match_id}")
async def get_trade_match(
match_id: str,
session: SessionDep
) -> TradeMatchResponse:
"""Get trade match details"""
try:
match = session.exec(
select(TradeMatch).where(TradeMatch.match_id == match_id)
).first()
if not match:
raise HTTPException(status_code=404, detail="Trade match not found")
return TradeMatchResponse(
match_id=match.match_id,
request_id=match.request_id,
buyer_agent_id=match.buyer_agent_id,
seller_agent_id=match.seller_agent_id,
match_score=match.match_score,
confidence_level=match.confidence_level,
price_compatibility=match.price_compatibility,
specification_compatibility=match.specification_compatibility,
timing_compatibility=match.timing_compatibility,
reputation_compatibility=match.reputation_compatibility,
geographic_compatibility=match.geographic_compatibility,
seller_offer=match.seller_offer,
proposed_terms=match.proposed_terms,
status=match.status.value,
created_at=match.created_at.isoformat(),
expires_at=match.expires_at.isoformat() if match.expires_at else None
)
except HTTPException:
raise
except Exception as e:
logger.error(f"Error getting trade match {match_id}: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/agents/{agent_id}/summary", response_model=TradingSummaryResponse)
async def get_trading_summary(
agent_id: str,
session: SessionDep
) -> TradingSummaryResponse:
"""Get comprehensive trading summary for an agent"""
trading_protocol = P2PTradingProtocol(session)
try:
summary = await trading_protocol.get_trading_summary(agent_id)
return TradingSummaryResponse(**summary)
except Exception as e:
logger.error(f"Error getting trading summary for {agent_id}: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/requests")
async def list_trade_requests(
agent_id: Optional[str] = Query(default=None, description="Filter by agent ID"),
trade_type: Optional[str] = Query(default=None, description="Filter by trade type"),
status: Optional[str] = Query(default=None, description="Filter by status"),
limit: int = Query(default=50, ge=1, le=100, description="Number of results"),
session: SessionDep
) -> List[TradeRequestResponse]:
"""List trade requests with filters"""
try:
query = select(TradeRequest)
if agent_id:
query = query.where(TradeRequest.buyer_agent_id == agent_id)
if trade_type:
query = query.where(TradeRequest.trade_type == trade_type)
if status:
query = query.where(TradeRequest.status == status)
requests = session.exec(
query.order_by(TradeRequest.created_at.desc()).limit(limit)
).all()
return [
TradeRequestResponse(
request_id=request.request_id,
buyer_agent_id=request.buyer_agent_id,
trade_type=request.trade_type.value,
title=request.title,
description=request.description,
requirements=request.requirements,
budget_range=request.budget_range,
status=request.status.value,
match_count=request.match_count,
best_match_score=request.best_match_score,
created_at=request.created_at.isoformat(),
updated_at=request.updated_at.isoformat(),
expires_at=request.expires_at.isoformat() if request.expires_at else None
)
for request in requests
]
except Exception as e:
logger.error(f"Error listing trade requests: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/matches")
async def list_trade_matches(
agent_id: Optional[str] = Query(default=None, description="Filter by agent ID"),
min_score: Optional[float] = Query(default=None, description="Minimum match score"),
status: Optional[str] = Query(default=None, description="Filter by status"),
limit: int = Query(default=50, ge=1, le=100, description="Number of results"),
session: SessionDep
) -> List[TradeMatchResponse]:
"""List trade matches with filters"""
try:
query = select(TradeMatch)
if agent_id:
query = query.where(
or_(
TradeMatch.buyer_agent_id == agent_id,
TradeMatch.seller_agent_id == agent_id
)
)
if min_score:
query = query.where(TradeMatch.match_score >= min_score)
if status:
query = query.where(TradeMatch.status == status)
matches = session.exec(
query.order_by(TradeMatch.match_score.desc()).limit(limit)
).all()
return [
TradeMatchResponse(
match_id=match.match_id,
request_id=match.request_id,
buyer_agent_id=match.buyer_agent_id,
seller_agent_id=match.seller_agent_id,
match_score=match.match_score,
confidence_level=match.confidence_level,
price_compatibility=match.price_compatibility,
specification_compatibility=match.specification_compatibility,
timing_compatibility=match.timing_compatibility,
reputation_compatibility=match.reputation_compatibility,
geographic_compatibility=match.geographic_compatibility,
seller_offer=match.seller_offer,
proposed_terms=match.proposed_terms,
status=match.status.value,
created_at=match.created_at.isoformat(),
expires_at=match.expires_at.isoformat() if match.expires_at else None
)
for match in matches
]
except Exception as e:
logger.error(f"Error listing trade matches: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/negotiations")
async def list_negotiations(
agent_id: Optional[str] = Query(default=None, description="Filter by agent ID"),
status: Optional[str] = Query(default=None, description="Filter by status"),
strategy: Optional[str] = Query(default=None, description="Filter by strategy"),
limit: int = Query(default=50, ge=1, le=100, description="Number of results"),
session: SessionDep
) -> List[NegotiationResponse]:
"""List negotiations with filters"""
try:
query = select(TradeNegotiation)
if agent_id:
query = query.where(
or_(
TradeNegotiation.buyer_agent_id == agent_id,
TradeNegotiation.seller_agent_id == agent_id
)
)
if status:
query = query.where(TradeNegotiation.status == status)
if strategy:
query = query.where(TradeNegotiation.negotiation_strategy == strategy)
negotiations = session.exec(
query.order_by(TradeNegotiation.created_at.desc()).limit(limit)
).all()
return [
NegotiationResponse(
negotiation_id=negotiation.negotiation_id,
match_id=negotiation.match_id,
buyer_agent_id=negotiation.buyer_agent_id,
seller_agent_id=negotiation.seller_agent_id,
status=negotiation.status.value,
negotiation_round=negotiation.negotiation_round,
current_terms=negotiation.current_terms,
negotiation_strategy=negotiation.negotiation_strategy,
auto_accept_threshold=negotiation.auto_accept_threshold,
created_at=negotiation.created_at.isoformat(),
started_at=negotiation.started_at.isoformat() if negotiation.started_at else None,
expires_at=negotiation.expires_at.isoformat() if negotiation.expires_at else None
)
for negotiation in negotiations
]
except Exception as e:
logger.error(f"Error listing negotiations: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/analytics")
async def get_trading_analytics(
period_type: str = Query(default="daily", description="Period type: daily, weekly, monthly"),
start_date: Optional[str] = Query(default=None, description="Start date (ISO format)"),
end_date: Optional[str] = Query(default=None, description="End date (ISO format)"),
session: SessionDep
) -> Dict[str, Any]:
"""Get P2P trading analytics"""
try:
# Parse dates if provided
start_dt = None
end_dt = None
if start_date:
start_dt = datetime.fromisoformat(start_date)
if end_date:
end_dt = datetime.fromisoformat(end_date)
if not start_dt:
start_dt = datetime.utcnow() - timedelta(days=30)
if not end_dt:
end_dt = datetime.utcnow()
# Get analytics data (mock implementation)
# In real implementation, this would query TradingAnalytics table
analytics = {
"period_type": period_type,
"start_date": start_dt.isoformat(),
"end_date": end_dt.isoformat(),
"total_trades": 150,
"completed_trades": 120,
"failed_trades": 15,
"cancelled_trades": 15,
"total_trade_volume": 7500.0,
"average_trade_value": 50.0,
"success_rate": 80.0,
"trade_type_distribution": {
"ai_power": 60,
"compute_resources": 30,
"data_services": 25,
"model_services": 20,
"inference_tasks": 15
},
"active_buyers": 45,
"active_sellers": 38,
"new_agents": 12,
"average_matching_time": 15.5, # minutes
"average_negotiation_time": 45.2, # minutes
"average_settlement_time": 8.7, # minutes
"regional_distribution": {
"us-east": 35,
"us-west": 28,
"eu-central": 22,
"ap-southeast": 18,
"ap-northeast": 15
}
}
return analytics
except Exception as e:
logger.error(f"Error getting trading analytics: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/simulate-match")
async def simulate_trade_matching(
request_data: TradeRequestRequest,
session: SessionDep
) -> Dict[str, Any]:
"""Simulate trade matching without creating actual request"""
trading_protocol = P2PTradingProtocol(session)
try:
# Create temporary trade request for simulation
temp_request = TradeRequest(
request_id=f"sim_{uuid4().hex[:8]}",
buyer_agent_id=request_data.buyer_agent_id,
trade_type=request_data.trade_type,
title=request_data.title,
description=request_data.description,
requirements=request_data.requirements,
specifications=request_data.requirements.get('specifications', {}),
budget_range=request_data.budget_range,
preferred_regions=request_data.preferred_regions,
excluded_regions=request_data.excluded_regions,
service_level_required=request_data.service_level_required
)
# Get available sellers
seller_offers = await trading_protocol.get_available_sellers(temp_request)
seller_reputations = await trading_protocol.get_seller_reputations(
[offer['agent_id'] for offer in seller_offers]
)
# Find matches
matches = trading_protocol.matching_engine.find_matches(
temp_request, seller_offers, seller_reputations
)
return {
"simulation": True,
"request_details": {
"trade_type": request_data.trade_type.value,
"budget_range": request_data.budget_range,
"requirements": request_data.requirements
},
"available_sellers": len(seller_offers),
"matches_found": len(matches),
"best_matches": matches[:5], # Top 5 matches
"average_match_score": sum(m['match_score'] for m in matches) / len(matches) if matches else 0.0
}
except Exception as e:
logger.error(f"Error simulating trade matching: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")