fix: resolve remaining CI issues — services, hardhat, Rust, mypy
All checks were successful
API Endpoint Tests / test-api-endpoints (push) Successful in 38s
Integration Tests / test-service-integration (push) Successful in 43s
Package Tests / test-python-packages (map[name:aitbc-core path:packages/py/aitbc-core]) (push) Successful in 21s
Package Tests / test-python-packages (map[name:aitbc-agent-sdk path:packages/py/aitbc-agent-sdk]) (push) Successful in 36s
Package Tests / test-python-packages (map[name:aitbc-crypto path:packages/py/aitbc-crypto]) (push) Successful in 19s
Package Tests / test-python-packages (map[name:aitbc-sdk path:packages/py/aitbc-sdk]) (push) Successful in 20s
Package Tests / test-javascript-packages (map[name:aitbc-sdk-js path:packages/js/aitbc-sdk]) (push) Successful in 16s
Python Tests / test-python (push) Successful in 1m4s
Package Tests / test-javascript-packages (map[name:aitbc-token path:packages/solidity/aitbc-token]) (push) Successful in 1m13s
Rust ZK Components Tests / test-rust-zk (push) Successful in 44s
Security Scanning / security-scan (push) Successful in 42s
Smart Contract Tests / test-solidity (map[name:aitbc-token path:packages/solidity/aitbc-token]) (push) Successful in 39s
Smart Contract Tests / test-solidity (map[name:zk-circuits path:apps/zk-circuits]) (push) Successful in 44s
Smart Contract Tests / lint-solidity (push) Successful in 48s

Service health checks:
- The Exchange API serves its health endpoint at /api/health rather than
  /health — updated the test script and workflow wait loops to fall back
  to /api/health when /health is unavailable
- Increased wait time to 2s intervals, 15 retries for service readiness
- Performance tests now hit the /health endpoints instead of the root path (/)

Hardhat compilation:
- aitbc-token was missing peer deps for @nomicfoundation/hardhat-toolbox
- Installed all 11 required peer packages (ethers, typechain, etc.)
- Contracts now compile (19 Solidity files) and all 17 tests pass

Rust workflow:
- Fixed a HOME mismatch: the gitea-runner user's HOME (/opt/gitea-runner)
  differed from root's (/root) when steps ran with euid 0 — now HOME=/root
  is set explicitly in all steps
- Set RUSTUP_HOME and CARGO_HOME for consistent toolchain location

Mypy type annotations (aitbc-agent-sdk):
- agent.py: narrow key types to RSA (isinstance check before sign/verify),
  fix supported_models Optional type, add __post_init__ return type
- compute_provider.py: add return types to all methods, declare
  pricing_model/dynamic_pricing attrs, rename register→create_provider
  to avoid signature conflict with parent, fix Optional safety
- swarm_coordinator.py: add return types to all 8 untyped methods
This commit is contained in:
aitbc1
2026-03-29 13:03:18 +02:00
parent 1f932d42e3
commit af34f6ae81
18 changed files with 317 additions and 221 deletions

View File

@@ -21,12 +21,12 @@ class AgentCapabilities:
"""Agent capability specification"""
compute_type: str # "inference", "training", "processing"
gpu_memory: Optional[int] = None
supported_models: List[str] = None
supported_models: Optional[List[str]] = None
performance_score: float = 0.0
max_concurrent_jobs: int = 1
specialization: Optional[str] = None
def __post_init__(self):
def __post_init__(self) -> None:
if self.supported_models is None:
self.supported_models = []
@@ -47,6 +47,9 @@ class AgentIdentity:
password=None
)
if not isinstance(private_key, rsa.RSAPrivateKey):
raise TypeError("Only RSA private keys are supported")
signature = private_key.sign(
message_str.encode(),
padding.PSS(
@@ -65,6 +68,9 @@ class AgentIdentity:
self.public_key.encode()
)
if not isinstance(public_key, rsa.RSAPublicKey):
raise TypeError("Only RSA public keys are supported")
try:
public_key.verify(
bytes.fromhex(signature),

View File

@@ -37,16 +37,18 @@ class JobExecution:
class ComputeProvider(Agent):
"""Agent that provides computational resources"""
def __init__(self, *args, **kwargs):
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
self.current_offers: List[ResourceOffer] = []
self.active_jobs: List[JobExecution] = []
self.earnings = 0.0
self.utilization_rate = 0.0
self.earnings: float = 0.0
self.utilization_rate: float = 0.0
self.pricing_model: Dict[str, Any] = {}
self.dynamic_pricing: Dict[str, Any] = {}
@classmethod
def register(cls, name: str, capabilities: Dict[str, Any], pricing_model: Dict[str, Any]) -> 'ComputeProvider':
"""Register as a compute provider"""
def create_provider(cls, name: str, capabilities: Dict[str, Any], pricing_model: Dict[str, Any]) -> 'ComputeProvider':
"""Create and register a compute provider"""
agent = super().create(name, "compute_provider", capabilities)
provider = cls(agent.identity, agent.capabilities)
provider.pricing_model = pricing_model
@@ -112,7 +114,7 @@ class ComputeProvider(Agent):
logger.error(f"Failed to enable dynamic pricing: {e}")
return False
async def _dynamic_pricing_loop(self):
async def _dynamic_pricing_loop(self) -> None:
"""Background task for dynamic price adjustments"""
while getattr(self, 'dynamic_pricing', {}).get('enabled', False):
try:
@@ -173,7 +175,7 @@ class ComputeProvider(Agent):
logger.error(f"Failed to accept job: {e}")
return False
async def _execute_job(self, job: JobExecution, job_request: Dict[str, Any]):
async def _execute_job(self, job: JobExecution, job_request: Dict[str, Any]) -> None:
"""Execute a computational job"""
try:
# Simulate job execution
@@ -202,7 +204,7 @@ class ComputeProvider(Agent):
job.status = "failed"
logger.error(f"Job execution failed: {job.job_id} - {e}")
async def _notify_job_completion(self, job: JobExecution, earnings: float):
async def _notify_job_completion(self, job: JobExecution, earnings: float) -> None:
"""Notify consumer about job completion"""
notification = {
"job_id": job.job_id,
@@ -215,7 +217,7 @@ class ComputeProvider(Agent):
await self.send_message(job.consumer_id, "job_completion", notification)
def _update_utilization(self):
def _update_utilization(self) -> None:
"""Update current utilization rate"""
self.utilization_rate = len(self.active_jobs) / self.capabilities.max_concurrent_jobs
@@ -227,8 +229,8 @@ class ComputeProvider(Agent):
"utilization_rate": self.utilization_rate,
"active_jobs": len(self.active_jobs),
"total_earnings": self.earnings,
"average_job_duration": sum(j.actual_duration.total_seconds() for j in completed_jobs) / len(completed_jobs) if completed_jobs else 0,
"quality_score": sum(j.quality_score for j in completed_jobs if j.quality_score) / len(completed_jobs) if completed_jobs else 0,
"average_job_duration": sum(j.actual_duration.total_seconds() for j in completed_jobs if j.actual_duration) / len(completed_jobs) if completed_jobs else 0,
"quality_score": sum(j.quality_score for j in completed_jobs if j.quality_score is not None) / len(completed_jobs) if completed_jobs else 0,
"current_offers": len(self.current_offers)
}

View File

@@ -5,7 +5,7 @@ Swarm Coordinator - for agents participating in collective intelligence
import asyncio
import json
import logging
from typing import Dict, List, Optional, Any
from typing import Dict, List, Optional, Any # noqa: F401
from datetime import datetime
from dataclasses import dataclass
from .agent import Agent
@@ -37,7 +37,7 @@ class SwarmDecision:
class SwarmCoordinator(Agent):
"""Agent that participates in swarm intelligence"""
def __init__(self, *args, **kwargs):
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
self.joined_swarms: Dict[str, Dict[str, Any]] = {}
self.swarm_reputation: Dict[str, float] = {}
@@ -91,7 +91,7 @@ class SwarmCoordinator(Agent):
logger.error(f"Failed to join swarm {swarm_type}: {e}")
return False
async def _swarm_participation_loop(self, swarm_id: str):
async def _swarm_participation_loop(self, swarm_id: str) -> None:
"""Background task for active swarm participation"""
while swarm_id in self.joined_swarms:
try:
@@ -145,7 +145,7 @@ class SwarmCoordinator(Agent):
logger.error(f"Failed to broadcast to swarm: {e}")
return False
async def _contribute_swarm_data(self, swarm_id: str):
async def _contribute_swarm_data(self, swarm_id: str) -> None:
"""Contribute data to swarm intelligence"""
try:
swarm_type = self.joined_swarms[swarm_id]["type"]
@@ -308,22 +308,22 @@ class SwarmCoordinator(Agent):
logger.error(f"Failed to analyze swarm benefits: {e}")
return {"error": str(e)}
async def _register_with_swarm(self, swarm_id: str, registration: Dict[str, Any]):
async def _register_with_swarm(self, swarm_id: str, registration: Dict[str, Any]) -> None:
"""Register with swarm coordinator (placeholder)"""
# TODO: Implement actual swarm registration
await asyncio.sleep(0.1)
async def _broadcast_to_swarm_network(self, message: SwarmMessage):
async def _broadcast_to_swarm_network(self, message: SwarmMessage) -> None:
"""Broadcast message to swarm network (placeholder)"""
# TODO: Implement actual swarm broadcasting
await asyncio.sleep(0.1)
async def _process_swarm_messages(self, swarm_id: str):
async def _process_swarm_messages(self, swarm_id: str) -> None:
"""Process incoming swarm messages (placeholder)"""
# TODO: Implement actual message processing
await asyncio.sleep(0.1)
async def _participate_in_decisions(self, swarm_id: str):
async def _participate_in_decisions(self, swarm_id: str) -> None:
"""Participate in swarm decision making (placeholder)"""
# TODO: Implement actual decision participation
await asyncio.sleep(0.1)