refactor: migrate all remaining modules to use shared aitbc.logging from aitbc-core package

- Replace `import logging` with `from aitbc.logging import get_logger` across blockchain-node scripts and coordinator-api modules
- Update logger initialization from `logging.getLogger(__name__)` to `get_logger(__name__)` in 30+ files
- Add production configuration validators for API keys, HMAC secret, and JWT secret in coordinator config
- Enhance coordinator startup with comprehensive initialization logging
This commit is contained in:
oib
2026-02-28 21:17:53 +01:00
parent f6ee77f497
commit 7cb0b30dae
81 changed files with 3378 additions and 153 deletions

View File

@@ -21,10 +21,10 @@ import argparse
import json
from typing import List, Dict, Any
from dataclasses import dataclass
import logging
from aitbc.logging import get_logger
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
@dataclass

View File

@@ -13,7 +13,7 @@ import asyncio
import aiohttp
import time
import argparse
import logging
from aitbc.logging import get_logger
import json
from typing import List, Dict, Any
from datetime import datetime
@@ -21,7 +21,7 @@ import subprocess
import sys
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class AutoscalingTest:

View File

@@ -6,7 +6,7 @@ Uses Starlette Broadcast to share messages between nodes
import argparse
import asyncio
import logging
from aitbc.logging import get_logger
from typing import Any, Dict
from starlette.applications import Starlette
@@ -19,7 +19,7 @@ import uvicorn
# Setup logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
# Global broadcast instance
broadcast = Broadcast("memory://")

View File

@@ -5,7 +5,7 @@ Settlement hooks for coordinator API integration
from typing import Dict, Any, Optional, List
from datetime import datetime
import asyncio
import logging
from aitbc.logging import get_logger
from .manager import BridgeManager
from .bridges.base import (
@@ -16,7 +16,7 @@ from .bridges.base import (
from ..models.job import Job
from ..models.receipt import Receipt
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class SettlementHook:

View File

@@ -13,7 +13,7 @@ Usage:
import argparse
import asyncio
import json
import logging
from aitbc.logging import get_logger
from datetime import datetime
from pathlib import Path
from typing import List, Dict, Any
@@ -21,7 +21,7 @@ from typing import List, Dict, Any
import asyncpg
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class DataMigration:

View File

@@ -1998,4 +1998,4 @@ dev = ["pytest", "setuptools"]
[metadata]
lock-version = "2.1"
python-versions = "^3.13"
content-hash = "d7edfdbe66a1d2a8d3d170c11cd21be545ddc4e177e50492005db885ed086780"
content-hash = "5860440ac2d60a75338e2ae25a9c9165eb4a640e0a02dc6887e9724365c31f0c"

View File

@@ -11,8 +11,8 @@ packages = [
python = "^3.13"
fastapi = "^0.111.0"
uvicorn = { extras = ["standard"], version = "^0.30.0" }
pydantic = "^2.7.0"
pydantic-settings = "^2.2.1"
pydantic = ">=2.7.0"
pydantic-settings = ">=2.2.1"
sqlalchemy = {extras = ["asyncio"], version = "^2.0.47"}
aiosqlite = "^0.20.0"
sqlmodel = "^0.0.16"

View File

@@ -5,12 +5,12 @@ Multi-Modal Agent Architecture and Adaptive Learning Systems
import asyncio
import json
import logging
from aitbc.logging import get_logger
from datetime import datetime
from typing import Dict, List, Optional, Any
from enum import Enum
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class AdvancedAgentCapabilities:

View File

@@ -5,12 +5,12 @@ Scaling strategies and implementation for enterprise workloads
import asyncio
import json
import logging
from aitbc.logging import get_logger
from datetime import datetime
from typing import Dict, List, Optional, Any
from enum import Enum
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class ScalingStrategy(str, Enum):

View File

@@ -5,12 +5,12 @@ On-Chain Model Marketplace Enhancement and OpenClaw Integration Enhancement
import asyncio
import json
import logging
from aitbc.logging import get_logger
from datetime import datetime
from typing import Dict, List, Optional, Any
from enum import Enum
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class HighPriorityImplementation:

View File

@@ -5,12 +5,12 @@ Week 9-12: Enterprise scaling and agent marketplace development
import asyncio
import json
import logging
from aitbc.logging import get_logger
from datetime import datetime
from typing import Dict, List, Optional, Any
from enum import Enum
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class Phase5Implementation:

View File

@@ -5,12 +5,12 @@ Complete deployment procedures for the agent orchestration system
import asyncio
import json
import logging
from aitbc.logging import get_logger
from datetime import datetime
from typing import Dict, List, Optional, Any
from pathlib import Path
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class AgentOrchestrationDeployment:

View File

@@ -5,12 +5,12 @@ Ongoing maintenance, monitoring, and enhancement of the complete system
import asyncio
import json
import logging
from aitbc.logging import get_logger
from datetime import datetime
from typing import Dict, List, Optional, Any
from enum import Enum
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class MaintenancePriority(str, Enum):

View File

@@ -5,9 +5,12 @@ Provides environment-based adapter selection and consolidated settings.
"""
import os
from pydantic import field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict
from typing import List, Optional
from pathlib import Path
import secrets
import string
class DatabaseConfig(BaseSettings):
@@ -58,12 +61,54 @@ class Settings(BaseSettings):
miner_api_keys: List[str] = []
admin_api_keys: List[str] = []
@field_validator('client_api_keys', 'miner_api_keys', 'admin_api_keys')
@classmethod
def validate_api_keys(cls, v: List[str]) -> List[str]:
    """Reject missing or placeholder API keys when running in production.

    Outside production (APP_ENV != 'production'), empty key lists are
    allowed so local development and tests need no credentials.

    Raises:
        ValueError: in production, if the list is empty, or any key is a
            placeholder (empty, starts with '$' i.e. an unexpanded
            env-var reference, or the literal sample value) or is
            shorter than 16 characters.
    """
    # Empty key lists are fine in dev/test environments.
    # NOTE: module-level `import os` already exists; no local import needed.
    if os.getenv('APP_ENV', 'dev') != 'production' and not v:
        return v
    if not v:
        raise ValueError('API keys cannot be empty in production')
    for key in v:
        # '$...' means an unexpanded environment-variable reference; the
        # literal sample value means the operator never configured the key.
        if not key or key.startswith('$') or key == 'your_api_key_here':
            raise ValueError('API keys must be set to valid values')
        if len(key) < 16:
            raise ValueError('API keys must be at least 16 characters long')
    return v
# Security
hmac_secret: Optional[str] = None
jwt_secret: Optional[str] = None
jwt_algorithm: str = "HS256"
jwt_expiration_hours: int = 24
@field_validator('hmac_secret')
@classmethod
def validate_hmac_secret(cls, v: Optional[str]) -> Optional[str]:
    """Require a real HMAC secret when running in production.

    Outside production (APP_ENV != 'production') an unset secret is
    passed through unchanged so local development needs no configuration.

    Raises:
        ValueError: in production, if the secret is missing, is a
            placeholder (starts with '$' or equals the sample value), or
            is shorter than 32 characters.
    """
    # Allow None in development/test environments.
    # NOTE: module-level `import os` already exists; no local import needed.
    if os.getenv('APP_ENV', 'dev') != 'production' and not v:
        return v
    if not v or v.startswith('$') or v == 'your_secret_here':
        raise ValueError('HMAC_SECRET must be set to a secure value')
    if len(v) < 32:
        raise ValueError('HMAC_SECRET must be at least 32 characters long')
    return v
@field_validator('jwt_secret')
@classmethod
def validate_jwt_secret(cls, v: Optional[str]) -> Optional[str]:
    """Require a real JWT signing secret when running in production.

    Outside production (APP_ENV != 'production') an unset secret is
    passed through unchanged so local development needs no configuration.

    Raises:
        ValueError: in production, if the secret is missing, is a
            placeholder (starts with '$' or equals the sample value), or
            is shorter than 32 characters.
    """
    # Allow None in development/test environments.
    # NOTE: module-level `import os` already exists; no local import needed.
    if os.getenv('APP_ENV', 'dev') != 'production' and not v:
        return v
    if not v or v.startswith('$') or v == 'your_secret_here':
        raise ValueError('JWT_SECRET must be set to a secure value')
    if len(v) < 32:
        raise ValueError('JWT_SECRET must be at least 32 characters long')
    return v
# CORS
allow_origins: List[str] = [
"http://localhost:3000",

View File

@@ -48,10 +48,40 @@ from contextlib import asynccontextmanager
async def lifespan(app: FastAPI):
    """Lifecycle events for the Coordinator API.

    Startup: initialize the database, ensure the audit-log directory
    exists, and log key configuration details. Any startup failure is
    logged and re-raised so the server does not come up half-initialized.
    Shutdown: log that cleanup completed.
    """
    logger.info("Starting Coordinator API")
    try:
        # Initialize database
        init_db()
        logger.info("Database initialized successfully")
        # Validate configuration
        if settings.app_env == "production":
            logger.info("Production environment detected, validating configuration")
            # Configuration validation happens automatically via Pydantic validators
        # Initialize audit logging directory
        from pathlib import Path
        audit_dir = Path(settings.audit_log_dir)
        audit_dir.mkdir(parents=True, exist_ok=True)
        logger.info(f"Audit logging directory: {audit_dir}")
        # Log service startup details
        logger.info(f"Coordinator API started on {settings.app_host}:{settings.app_port}")
        logger.info(f"Database adapter: {settings.database.adapter}")
        logger.info(f"Environment: {settings.app_env}")
    except Exception as e:
        logger.error(f"Failed to start Coordinator API: {e}")
        raise
    yield
    logger.info("Shutting down Coordinator API")
    try:
        # Cleanup resources (currently none beyond logging completion)
        logger.info("Coordinator API shutdown complete")
    except Exception as e:
        logger.error(f"Error during shutdown: {e}")
def create_app() -> FastAPI:
# Initialize rate limiter

View File

@@ -1,15 +1,25 @@
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi import APIRouter, Depends, HTTPException, status, Request
from sqlmodel import select
from slowapi import Limiter
from slowapi.util import get_remote_address
from ..deps import require_admin_key
from ..services import JobService, MinerService
from ..storage import SessionDep
from aitbc.logging import get_logger
logger = get_logger(__name__)
limiter = Limiter(key_func=get_remote_address)
router = APIRouter(prefix="/admin", tags=["admin"])
@router.get("/stats", summary="Get coordinator stats")
async def get_stats(session: SessionDep, admin_key: str = Depends(require_admin_key())) -> dict[str, int]: # type: ignore[arg-type]
@limiter.limit("20/minute")
async def get_stats(
request: Request,
session: SessionDep,
admin_key: str = Depends(require_admin_key())
) -> dict[str, int]: # type: ignore[arg-type]
service = JobService(session)
from sqlmodel import func, select
from ..domain import Job

View File

@@ -7,7 +7,7 @@ from datetime import datetime
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query, Body
from pydantic import BaseModel, Field
import logging
from aitbc.logging import get_logger
from ..storage import SessionDep
from ..services.creative_capabilities_service import (
@@ -15,7 +15,7 @@ from ..services.creative_capabilities_service import (
)
from ..domain.agent_performance import CreativeCapability
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
router = APIRouter(prefix="/v1/agent-creativity", tags=["agent-creativity"])

View File

@@ -5,7 +5,7 @@ Provides REST API endpoints for production deployment and integration management
from fastapi import APIRouter, Depends, HTTPException, BackgroundTasks
from typing import List, Optional
import logging
from aitbc.logging import get_logger
from ..domain.agent import (
AIAgentWorkflow, AgentExecution, AgentStatus, VerificationLevel
@@ -19,7 +19,7 @@ from ..deps import require_admin_key
from sqlmodel import Session, select
from datetime import datetime
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
router = APIRouter(prefix="/agents/integration", tags=["Agent Integration"])

View File

@@ -7,7 +7,7 @@ from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query
from pydantic import BaseModel, Field
import logging
from aitbc.logging import get_logger
from ..storage import SessionDep
from ..services.agent_performance_service import (
@@ -21,7 +21,7 @@ from ..domain.agent_performance import (
OptimizationTarget
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
router = APIRouter(prefix="/v1/agent-performance", tags=["agent-performance"])

View File

@@ -5,7 +5,7 @@ Provides REST API endpoints for agent workflow management and execution
from fastapi import APIRouter, Depends, HTTPException, BackgroundTasks
from typing import List, Optional
import logging
from aitbc.logging import get_logger
from ..domain.agent import (
AIAgentWorkflow, AgentWorkflowCreate, AgentWorkflowUpdate,
@@ -17,7 +17,7 @@ from ..storage import SessionDep
from ..deps import require_admin_key
from sqlmodel import Session, select
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
router = APIRouter(prefix="/agents", tags=["AI Agents"])

View File

@@ -5,7 +5,7 @@ Provides REST API endpoints for security management and auditing
from fastapi import APIRouter, Depends, HTTPException, BackgroundTasks
from typing import List, Optional
import logging
from aitbc.logging import get_logger
from ..domain.agent import (
AIAgentWorkflow, AgentExecution, AgentStatus, VerificationLevel
@@ -19,7 +19,7 @@ from ..storage import SessionDep
from ..deps import require_admin_key
from sqlmodel import Session, select
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
router = APIRouter(prefix="/agents/security", tags=["Agent Security"])

View File

@@ -7,7 +7,7 @@ from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query
from pydantic import BaseModel, Field
import logging
from aitbc.logging import get_logger
from ..storage import SessionDep
from ..services.analytics_service import MarketplaceAnalytics
@@ -16,7 +16,7 @@ from ..domain.analytics import (
AnalyticsPeriod, MetricType, InsightType, ReportType
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
router = APIRouter(prefix="/v1/analytics", tags=["analytics"])

View File

@@ -7,7 +7,7 @@ from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query
from pydantic import BaseModel, Field
import logging
from aitbc.logging import get_logger
from ..storage import SessionDep
from ..services.certification_service import (
@@ -20,7 +20,7 @@ from ..domain.certification import (
PartnershipType, BadgeType
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
router = APIRouter(prefix="/v1/certification", tags=["certification"])

View File

@@ -7,7 +7,7 @@ from datetime import datetime
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query, Body
from pydantic import BaseModel, Field
import logging
from aitbc.logging import get_logger
from ..storage import SessionDep
from ..services.community_service import (
@@ -19,7 +19,7 @@ from ..domain.community import (
CommunityPost, Hackathon, DeveloperTier, SolutionStatus, LabStatus
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
router = APIRouter(prefix="/community", tags=["community"])

View File

@@ -8,9 +8,9 @@ import uuid
import time
import json
import os
import logging
from aitbc.logging import get_logger
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
from ..schemas import (
ExchangePaymentRequest,

View File

@@ -7,7 +7,7 @@ from datetime import datetime
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query, Body
from pydantic import BaseModel, Field
import logging
from aitbc.logging import get_logger
from ..storage import SessionDep
from ..services.governance_service import GovernanceService
@@ -16,7 +16,7 @@ from ..domain.governance import (
ProposalStatus, VoteType, GovernanceRole
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
router = APIRouter(prefix="/governance", tags=["governance"])

View File

@@ -4,7 +4,7 @@ REST API endpoints for advanced marketplace features including royalties, licens
"""
from typing import List, Optional
import logging
from aitbc.logging import get_logger
from fastapi import APIRouter, HTTPException, Depends
from pydantic import BaseModel, Field
@@ -20,7 +20,7 @@ from ..schemas.marketplace_enhanced import (
MarketplaceAnalyticsRequest, MarketplaceAnalyticsResponse
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
router = APIRouter(prefix="/marketplace/enhanced", tags=["Enhanced Marketplace"])

View File

@@ -4,7 +4,7 @@ REST API endpoints for enhanced marketplace features
"""
from typing import List, Optional, Dict, Any
import logging
from aitbc.logging import get_logger
from fastapi import APIRouter, HTTPException, Depends
from pydantic import BaseModel, Field
@@ -14,7 +14,7 @@ from ..storage import SessionDep
from ..deps import require_admin_key
from sqlmodel import Session
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
router = APIRouter(prefix="/marketplace/enhanced", tags=["Marketplace Enhanced"])

View File

@@ -8,7 +8,7 @@ from datetime import datetime
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query, BackgroundTasks
from pydantic import BaseModel, Field
import logging
from aitbc.logging import get_logger
from ..storage import SessionDep
import sys
@@ -20,7 +20,7 @@ from aitbc.gpu_acceleration.parallel_processing.marketplace_cache_optimizer impo
from aitbc.gpu_acceleration.parallel_processing.marketplace_monitor import monitor as marketplace_monitor
from aitbc.gpu_acceleration.parallel_processing.marketplace_scaler import ResourceScaler, ScalingPolicy
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
router = APIRouter(prefix="/v1/marketplace/performance", tags=["marketplace-performance"])

View File

@@ -1,23 +1,27 @@
from datetime import datetime
from typing import Any
import logging
from fastapi import APIRouter, Depends, HTTPException, Response, status
from fastapi import APIRouter, Depends, HTTPException, Response, status, Request
from slowapi import Limiter
from slowapi.util import get_remote_address
from ..deps import require_miner_key
from ..schemas import AssignedJob, JobFailSubmit, JobResultSubmit, JobState, MinerHeartbeat, MinerRegister, PollRequest
from ..services import JobService, MinerService
from ..services.receipts import ReceiptService
from ..storage import SessionDep
from aitbc.logging import get_logger
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
limiter = Limiter(key_func=get_remote_address)
router = APIRouter(tags=["miner"])
@router.post("/miners/register", summary="Register or update miner")
@limiter.limit("30/minute")
async def register(
req: MinerRegister,
request: Request,
session: SessionDep,
miner_id: str = Depends(require_miner_key()),
) -> dict[str, Any]: # type: ignore[arg-type]
@@ -26,8 +30,10 @@ async def register(
return {"status": "ok", "session_token": record.session_token}
@router.post("/miners/heartbeat", summary="Send miner heartbeat")
@limiter.limit("60/minute")
async def heartbeat(
req: MinerHeartbeat,
request: Request,
session: SessionDep,
miner_id: str = Depends(require_miner_key()),
) -> dict[str, str]: # type: ignore[arg-type]

View File

@@ -7,7 +7,7 @@ from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, Depends, HTTPException, Query, BackgroundTasks, WebSocket, WebSocketDisconnect
from pydantic import BaseModel, Field
import logging
from aitbc.logging import get_logger
from ..storage import SessionDep
from ..services.multi_modal_fusion import MultiModalFusionEngine
@@ -17,7 +17,7 @@ from ..domain.agent_performance import (
CreativeCapability
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
router = APIRouter(prefix="/multi-modal-rl", tags=["multi-modal-rl"])

View File

@@ -4,7 +4,7 @@ REST API endpoints for advanced agent orchestration, edge computing integration,
"""
from typing import List, Optional
import logging
from aitbc.logging import get_logger
from fastapi import APIRouter, HTTPException, Depends
from pydantic import BaseModel, Field
@@ -23,7 +23,7 @@ from ..schemas.openclaw_enhanced import (
EcosystemDevelopmentRequest, EcosystemDevelopmentResponse
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
router = APIRouter(prefix="/openclaw/enhanced", tags=["OpenClaw Enhanced"])

View File

@@ -4,7 +4,7 @@ REST API endpoints for OpenClaw integration features
"""
from typing import List, Optional, Dict, Any
import logging
from aitbc.logging import get_logger
from fastapi import APIRouter, HTTPException, Depends
from pydantic import BaseModel, Field
@@ -14,7 +14,7 @@ from ..storage import SessionDep
from ..deps import require_admin_key
from sqlmodel import Session
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
router = APIRouter(prefix="/openclaw/enhanced", tags=["OpenClaw Enhanced"])

View File

@@ -7,7 +7,7 @@ from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query
from pydantic import BaseModel, Field
import logging
from aitbc.logging import get_logger
from ..storage import SessionDep
from ..services.reputation_service import ReputationService
@@ -16,7 +16,7 @@ from ..domain.reputation import (
TrustScoreCategory
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
router = APIRouter(prefix="/v1/reputation", tags=["reputation"])

View File

@@ -7,7 +7,7 @@ from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query
from pydantic import BaseModel, Field
import logging
from aitbc.logging import get_logger
from ..storage import SessionDep
from ..services.reward_service import RewardEngine
@@ -15,7 +15,7 @@ from ..domain.rewards import (
AgentRewardProfile, RewardTier, RewardType, RewardStatus
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
router = APIRouter(prefix="/v1/rewards", tags=["rewards"])

View File

@@ -7,7 +7,7 @@ from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any
from fastapi import APIRouter, HTTPException, Depends, Query
from pydantic import BaseModel, Field
import logging
from aitbc.logging import get_logger
from ..storage import SessionDep
from ..services.trading_service import P2PTradingProtocol
@@ -16,7 +16,7 @@ from ..domain.trading import (
TradeStatus, TradeType, NegotiationStatus, SettlementType
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
router = APIRouter(prefix="/v1/trading", tags=["trading"])

View File

@@ -6,7 +6,7 @@ Web Vitals API endpoint for collecting performance metrics
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from typing import List, Dict, Any, Optional
import logging
from aitbc.logging import get_logger
router = APIRouter()

View File

@@ -4,7 +4,7 @@ Reinforcement learning frameworks for agent self-improvement
"""
import asyncio
import logging
from aitbc.logging import get_logger
from typing import Dict, List, Any, Optional, Tuple, Union
from datetime import datetime, timedelta
from enum import Enum
@@ -14,7 +14,7 @@ import json
from ..storage import SessionDep
from ..domain import AIAgentWorkflow, AgentExecution, AgentStatus
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class LearningAlgorithm(str, Enum):

View File

@@ -4,7 +4,7 @@ Implements meta-learning, federated learning, and continuous model improvement
"""
import asyncio
import logging
from aitbc.logging import get_logger
from typing import Dict, List, Any, Optional, Tuple, Union
from datetime import datetime, timedelta
from enum import Enum
@@ -12,7 +12,7 @@ import json
import numpy as np
from dataclasses import dataclass, asdict, field
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class LearningType(str, Enum):

View File

@@ -8,7 +8,7 @@ import numpy as np
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Tuple
from uuid import uuid4
import logging
from aitbc.logging import get_logger
from sqlmodel import Session, select, update, delete, and_, or_, func
from sqlalchemy.exc import SQLAlchemyError
@@ -18,7 +18,7 @@ from ..domain.agent_performance import (
AgentCapability, FusionModel
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class AdvancedReinforcementLearningEngine:

View File

@@ -4,7 +4,7 @@ Implements secure agent-to-agent messaging with reputation-based access control
"""
import asyncio
import logging
from aitbc.logging import get_logger
from typing import Dict, List, Any, Optional, Tuple
from datetime import datetime, timedelta
from enum import Enum
@@ -15,7 +15,7 @@ from dataclasses import dataclass, asdict, field
from .cross_chain_reputation import CrossChainReputationService, ReputationTier
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class MessageType(str, Enum):

View File

@@ -5,7 +5,7 @@ Integrates agent orchestration with existing ML ZK proof system and provides dep
import asyncio
import json
import logging
from aitbc.logging import get_logger
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any
from uuid import uuid4
@@ -44,7 +44,7 @@ class ZKProofService:
"details": {"mock": True}
}
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class DeploymentStatus(str, Enum):

View File

@@ -4,7 +4,7 @@ Implements multi-agent coordination and sub-task management
"""
import asyncio
import logging
from aitbc.logging import get_logger
from typing import Dict, List, Any, Optional, Tuple, Set
from datetime import datetime, timedelta
from enum import Enum
@@ -14,7 +14,7 @@ from dataclasses import dataclass, asdict, field
from .task_decomposition import TaskDecomposition, SubTask, SubTaskStatus, GPU_Tier
from .bid_strategy_engine import BidResult, BidStrategy, UrgencyLevel
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class OrchestratorStatus(str, Enum):

View File

@@ -8,7 +8,7 @@ import numpy as np
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Tuple
from uuid import uuid4
import logging
from aitbc.logging import get_logger
from sqlmodel import Session, select, update, delete, and_, or_, func
from sqlalchemy.exc import SQLAlchemyError
@@ -21,7 +21,7 @@ from ..domain.agent_performance import (
OptimizationTarget
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class MetaLearningEngine:

View File

@@ -6,7 +6,7 @@ Implements comprehensive security, auditing, and trust establishment for agent e
import asyncio
import hashlib
import json
import logging
from aitbc.logging import get_logger
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Set
from uuid import uuid4
@@ -20,7 +20,7 @@ from ..domain.agent import (
AgentStatus, VerificationLevel
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class SecurityLevel(str, Enum):

View File

@@ -8,7 +8,7 @@ from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any
from uuid import uuid4
import json
import logging
from aitbc.logging import get_logger
from sqlmodel import Session, select, update, delete
from sqlalchemy.exc import SQLAlchemyError
@@ -24,7 +24,7 @@ class CoordinatorClient:
"""Mock coordinator client for agent orchestration"""
pass
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class AgentStateManager:

View File

@@ -4,7 +4,7 @@ Implements a sophisticated marketplace where agents can offer specialized servic
"""
import asyncio
import logging
from aitbc.logging import get_logger
from typing import Dict, List, Any, Optional, Tuple
from datetime import datetime, timedelta
from enum import Enum
@@ -12,7 +12,7 @@ import json
import hashlib
from dataclasses import dataclass, asdict, field
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class ServiceStatus(str, Enum):

View File

@@ -9,7 +9,7 @@ from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Tuple
from uuid import uuid4
import json
import logging
from aitbc.logging import get_logger
from sqlmodel import Session, select, update, delete, and_, or_, func
from sqlalchemy.exc import SQLAlchemyError
@@ -23,7 +23,7 @@ from ..domain.trading import TradingAnalytics
from ..domain.rewards import RewardAnalytics
from ..domain.reputation import AgentReputation
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class DataCollector:

View File

@@ -4,7 +4,7 @@ Implements intelligent bidding algorithms for GPU rental negotiations
"""
import asyncio
import logging
from aitbc.logging import get_logger
from typing import Dict, List, Any, Optional, Tuple
from datetime import datetime, timedelta
from enum import Enum
@@ -12,7 +12,7 @@ import numpy as np
import json
from dataclasses import dataclass, asdict
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class BidStrategy(str, Enum):

View File

@@ -6,7 +6,7 @@ Uses RPC to connect to Bitcoin Core (or alternative like Block.io)
import os
import json
import logging
from aitbc.logging import get_logger
from typing import Dict, Optional
try:
@@ -16,7 +16,7 @@ except ImportError:
HTTP_CLIENT_AVAILABLE = False
logging.warning("httpx not available, bitcoin wallet functions will be disabled")
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
# Bitcoin wallet configuration (credentials from environment)
WALLET_CONFIG = {

View File

@@ -4,12 +4,12 @@ Blockchain service for AITBC token operations
import httpx
import asyncio
import logging
from aitbc.logging import get_logger
from typing import Optional
from ..config import settings
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
BLOCKCHAIN_RPC = f"http://127.0.0.1:9080/rpc"

View File

@@ -9,7 +9,7 @@ import json
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Tuple
from uuid import uuid4
import logging
from aitbc.logging import get_logger
from sqlmodel import Session, select, update, delete, and_, or_, func
from sqlalchemy.exc import SQLAlchemyError
@@ -23,7 +23,7 @@ from ..domain.certification import (
from ..domain.reputation import AgentReputation
from ..domain.rewards import AgentRewardProfile
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class CertificationSystem:

View File

@@ -6,7 +6,7 @@ Services for managing OpenClaw developer tools, SDKs, and third-party solutions
from typing import Optional, List, Dict, Any
from sqlmodel import Session, select
from datetime import datetime
import logging
from aitbc.logging import get_logger
from uuid import uuid4
from ..domain.community import (
@@ -14,7 +14,7 @@ from ..domain.community import (
CommunityPost, Hackathon, DeveloperTier, SolutionStatus, LabStatus
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class DeveloperEcosystemService:
"""Service for managing the developer ecosystem and SDKs"""

View File

@@ -8,7 +8,7 @@ import numpy as np
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Tuple
from uuid import uuid4
import logging
from aitbc.logging import get_logger
import random
from sqlmodel import Session, select, update, delete, and_, or_, func
@@ -18,7 +18,7 @@ from ..domain.agent_performance import (
CreativeCapability, AgentCapability, AgentPerformanceProfile
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class CreativityEnhancementEngine:

View File

@@ -4,14 +4,14 @@ Implements portable reputation scores across multiple blockchain networks
"""
import asyncio
import logging
from aitbc.logging import get_logger
from typing import Dict, List, Any, Optional, Tuple
from datetime import datetime, timedelta
from enum import Enum
import json
from dataclasses import dataclass, asdict, field
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class ReputationTier(str, Enum):

View File

@@ -2,7 +2,7 @@ from abc import ABC, abstractmethod
from typing import Dict, List, Optional, Tuple
import numpy as np
from dataclasses import dataclass
import logging
from aitbc.logging import get_logger
@dataclass
class FHEContext:

View File

@@ -6,7 +6,7 @@ Implements the OpenClaw DAO, voting mechanisms, and proposal lifecycle
from typing import Optional, List, Dict, Any
from sqlmodel import Session, select
from datetime import datetime, timedelta
import logging
from aitbc.logging import get_logger
from uuid import uuid4
from ..domain.governance import (
@@ -14,7 +14,7 @@ from ..domain.governance import (
ProposalStatus, VoteType, GovernanceRole
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class GovernanceService:
"""Core service for managing DAO operations and voting"""

View File

@@ -4,7 +4,7 @@ Advanced GPU optimization for cross-modal attention mechanisms
"""
import asyncio
import logging
from aitbc.logging import get_logger
from typing import Dict, List, Any, Optional, Tuple
import numpy as np
from datetime import datetime
@@ -12,7 +12,7 @@ from datetime import datetime
from ..storage import SessionDep
from .multimodal_agent import ModalityType, ProcessingMode
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class GPUAcceleratedMultiModal:

View File

@@ -4,7 +4,7 @@ Handles IPFS/Filecoin integration for persistent agent memory storage
"""
import asyncio
import logging
from aitbc.logging import get_logger
from typing import Dict, List, Any, Optional, Tuple
from datetime import datetime, timedelta
from pathlib import Path
@@ -21,7 +21,7 @@ except ImportError as e:
logging.error(f"IPFS/Web3 dependencies not installed: {e}")
raise
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
@dataclass

View File

@@ -113,16 +113,16 @@ class JobService:
self.session.refresh(job)
return job
except Exception as e:
import logging
logger = logging.getLogger(__name__)
from aitbc.logging import get_logger
logger = get_logger(__name__)
logger.warning(f"Error checking job {job.id}: {e}")
self.session.rollback() # Rollback on individual job failure
continue
return None
except Exception as e:
import logging
logger = logging.getLogger(__name__)
from aitbc.logging import get_logger
logger = get_logger(__name__)
logger.error(f"Error acquiring next job: {e}")
raise # Propagate for caller to handle

View File

@@ -4,7 +4,7 @@ Basic marketplace enhancement features compatible with existing domain models
"""
import asyncio
import logging
from aitbc.logging import get_logger
from typing import Dict, List, Optional, Any
from datetime import datetime
from uuid import uuid4
@@ -13,7 +13,7 @@ from enum import Enum
from sqlmodel import Session, select, update
from ..domain import MarketplaceOffer, MarketplaceBid
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class RoyaltyTier(str, Enum):

View File

@@ -4,7 +4,7 @@ Handles memory lifecycle management, versioning, and optimization
"""
import asyncio
import logging
from aitbc.logging import get_logger
from typing import Dict, List, Any, Optional, Tuple
from datetime import datetime, timedelta
from dataclasses import dataclass, asdict
@@ -14,7 +14,7 @@ import json
from .ipfs_storage_service import IPFSStorageService, MemoryMetadata, IPFSUploadResult
from ..storage import SessionDep
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class MemoryType(str, Enum):

View File

@@ -4,7 +4,7 @@ Specialized optimization for text, image, audio, video, tabular, and graph data
"""
import asyncio
import logging
from aitbc.logging import get_logger
from typing import Dict, List, Any, Optional, Union, Tuple
from datetime import datetime
from enum import Enum
@@ -13,7 +13,7 @@ import numpy as np
from ..storage import SessionDep
from .multimodal_agent import ModalityType
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class OptimizationStrategy(str, Enum):

View File

@@ -8,7 +8,7 @@ import numpy as np
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Tuple
from uuid import uuid4
import logging
from aitbc.logging import get_logger
from sqlmodel import Session, select, update, delete, and_, or_, func
from sqlalchemy.exc import SQLAlchemyError
@@ -18,7 +18,7 @@ from ..domain.agent_performance import (
ReinforcementLearningConfig, AgentPerformanceProfile
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class MultiModalFusionEngine:

View File

@@ -4,7 +4,7 @@ Advanced AI agent capabilities with unified multi-modal processing pipeline
"""
import asyncio
import logging
from aitbc.logging import get_logger
from typing import Dict, List, Any, Optional, Union
from datetime import datetime
from enum import Enum
@@ -13,7 +13,7 @@ import json
from ..storage import SessionDep
from ..domain import AIAgentWorkflow, AgentExecution, AgentStatus
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class ModalityType(str, Enum):

View File

@@ -4,7 +4,7 @@ Basic OpenClaw integration features compatible with existing infrastructure
"""
import asyncio
import logging
from aitbc.logging import get_logger
from typing import Dict, List, Optional, Any
from datetime import datetime, timedelta
from uuid import uuid4
@@ -13,7 +13,7 @@ from enum import Enum
from sqlmodel import Session, select
from ..domain import MarketplaceOffer, MarketplaceBid
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class SkillType(str, Enum):

View File

@@ -3,7 +3,7 @@
from datetime import datetime, timedelta
from typing import Optional, Dict, Any
import httpx
import logging
from aitbc.logging import get_logger
from ..domain.payment import JobPayment, PaymentEscrow
from ..schemas import (
@@ -14,7 +14,7 @@ from ..schemas import (
)
from ..storage import SessionDep
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class PaymentService:

View File

@@ -1,11 +1,11 @@
from __future__ import annotations
import logging
from aitbc.logging import get_logger
from typing import Any, Dict, Optional
from secrets import token_hex
from datetime import datetime
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
from aitbc_crypto.signing import ReceiptSigner

View File

@@ -9,7 +9,7 @@ from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Tuple
from uuid import uuid4
import json
import logging
from aitbc.logging import get_logger
from sqlmodel import Session, select, update, delete, and_, or_, func
from sqlalchemy.exc import SQLAlchemyError
@@ -22,7 +22,7 @@ from ..domain.reputation import (
from ..domain.agent import AIAgentWorkflow, AgentStatus
from ..domain.payment import PaymentTransaction
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class TrustScoreCalculator:

View File

@@ -9,7 +9,7 @@ from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Tuple
from uuid import uuid4
import json
import logging
from aitbc.logging import get_logger
from sqlmodel import Session, select, update, delete, and_, or_, func
from sqlalchemy.exc import SQLAlchemyError
@@ -21,7 +21,7 @@ from ..domain.rewards import (
from ..domain.reputation import AgentReputation, ReputationLevel
from ..domain.payment import PaymentTransaction
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class RewardCalculator:

View File

@@ -4,14 +4,14 @@ Implements intelligent task splitting and sub-task management
"""
import asyncio
import logging
from aitbc.logging import get_logger
from typing import Dict, List, Any, Optional, Tuple, Set
from datetime import datetime, timedelta
from enum import Enum
import json
from dataclasses import dataclass, asdict, field
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class TaskType(str, Enum):

View File

@@ -9,7 +9,7 @@ from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Tuple
from uuid import uuid4
import json
import logging
from aitbc.logging import get_logger
from sqlmodel import Session, select, update, delete, and_, or_, func
from sqlalchemy.exc import SQLAlchemyError
@@ -22,7 +22,7 @@ from ..domain.trading import (
from ..domain.reputation import AgentReputation
from ..domain.rewards import AgentRewardProfile
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class MatchingEngine:

View File

@@ -8,11 +8,11 @@ import psycopg2
from psycopg2.extras import RealDictCursor
from typing import Generator, Optional, Dict, Any, List
import json
import logging
from aitbc.logging import get_logger
from datetime import datetime
from decimal import Decimal
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
from .config_pg import settings

View File

@@ -40,8 +40,10 @@ def _init_db(tmp_path_factory):
@pytest.fixture()
def session():
with session_scope() as sess:
sess.exec(delete(Job))
sess.exec(delete(Miner))
from sqlmodel import select
# Clear all data
sess.query(Job).delete()
sess.query(Miner).delete()
sess.commit()
yield sess

1537
apps/pool-hub/poetry.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,29 @@
[tool.poetry]
name = "aitbc-pool-hub"
version = "0.1.0"
description = "AITBC Pool Hub Service"
authors = ["AITBC Team <team@aitbc.dev>"]
readme = "README.md"
packages = [{include = "app", from = "src"}]
[tool.poetry.dependencies]
python = "^3.13"
fastapi = "^0.111.0"
uvicorn = {extras = ["standard"], version = "^0.30.0"}
pydantic = "^2.7.0"
pydantic-settings = "^2.2.1"
sqlalchemy = {extras = ["asyncio"], version = "^2.0.47"}
aiosqlite = "^0.20.0"
sqlmodel = "^0.0.16"
httpx = "^0.27.0"
python-dotenv = "^1.0.1"
asyncpg = "^0.29.0"
aitbc-core = {path = "../../packages/py/aitbc-core"}
[tool.poetry.group.dev.dependencies]
pytest = "^8.2.0"
pytest-asyncio = "^0.23.0"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

View File

@@ -2,7 +2,7 @@ from __future__ import annotations
import datetime as dt
import json
import logging
from aitbc.logging import get_logger
from typing import Iterable, List, Optional
from uuid import UUID
@@ -13,7 +13,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
from ..models import Feedback
from ..storage.redis_keys import RedisKeys
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class FeedbackRepository:

1537
apps/wallet-daemon/poetry.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,29 @@
[tool.poetry]
name = "aitbc-wallet-daemon"
version = "0.1.0"
description = "AITBC Wallet Daemon Service"
authors = ["AITBC Team <team@aitbc.dev>"]
readme = "README.md"
packages = [{include = "app", from = "src"}]
[tool.poetry.dependencies]
python = "^3.13"
fastapi = "^0.111.0"
uvicorn = {extras = ["standard"], version = "^0.30.0"}
pydantic = "^2.7.0"
pydantic-settings = "^2.2.1"
sqlalchemy = {extras = ["asyncio"], version = "^2.0.47"}
aiosqlite = "^0.20.0"
sqlmodel = "^0.0.16"
httpx = "^0.27.0"
python-dotenv = "^1.0.1"
asyncpg = "^0.29.0"
aitbc-core = {path = "../../packages/py/aitbc-core"}
[tool.poetry.group.dev.dependencies]
pytest = "^8.2.0"
pytest-asyncio = "^0.23.0"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

View File

@@ -2,7 +2,7 @@ from __future__ import annotations
import base64
import logging
from aitbc.logging import get_logger
import base64
from fastapi import APIRouter, Depends, HTTPException, status, Request
@@ -28,7 +28,7 @@ from .ledger_mock import SQLiteLedgerAdapter
from .receipts.service import ReceiptValidationResult, ReceiptVerifierService
from .security import RateLimiter, wipe_buffer
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
_rate_limiter = RateLimiter(max_requests=30, window_seconds=60)

View File

@@ -5,9 +5,9 @@ from psycopg2.extras import RealDictCursor
from typing import Optional, Dict, Any, List
from datetime import datetime
import json
import logging
from aitbc.logging import get_logger
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class PostgreSQLLedgerAdapter:
"""PostgreSQL implementation of the wallet ledger"""