Migrate coordinator-api to centralized aitbc package utilities

- Migrate 69 service files from stdlib logging.getLogger to aitbc.get_logger
- Migrate explorer.py HTTP client from httpx.Client to AITBCHTTPClient
- Migrate config.py hardcoded paths to use DATA_DIR and LOG_DIR constants from aitbc.constants
- Remove duplicate LOG_DIR import in config.py
- All routers already using aitbc utilities
This commit is contained in:
aitbc
2026-04-25 06:45:04 +02:00
parent 08d6921444
commit 55060730b2
71 changed files with 182 additions and 146 deletions

View File

@@ -6,6 +6,7 @@ Provides environment-based adapter selection and consolidated settings.
import os
from aitbc.constants import DATA_DIR, LOG_DIR
from pydantic import Field, field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict
@@ -27,7 +28,7 @@ class DatabaseConfig(BaseSettings):
# Default SQLite path - consistent with blockchain-node pattern
if self.adapter == "sqlite":
return "sqlite:////var/lib/aitbc/data/coordinator.db"
return f"sqlite:///{DATA_DIR}/data/coordinator.db"
# Default PostgreSQL connection string
return f"{self.adapter}://localhost:5432/coordinator"
@@ -44,7 +45,7 @@ class Settings(BaseSettings):
app_env: str = "dev"
app_host: str = "127.0.0.1"
app_port: int = 8011
audit_log_dir: str = "/var/log/aitbc/audit"
audit_log_dir: str = str(LOG_DIR / "audit")
# Database
database: DatabaseConfig = DatabaseConfig()
@@ -196,7 +197,7 @@ class Settings(BaseSettings):
if self.database.url:
return self.database.url
# Default SQLite path - consistent with blockchain-node pattern
return "sqlite:////var/lib/aitbc/data/coordinator.db"
return f"sqlite:///{DATA_DIR}/data/coordinator.db"
@database_url.setter
def database_url(self, value: str):

View File

@@ -8,9 +8,9 @@ Adaptive Learning Systems - Phase 5.2
Reinforcement learning frameworks for agent self-improvement
"""
import logging
from aitbc import get_logger
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
from datetime import datetime
from enum import StrEnum
from typing import Any

View File

@@ -6,7 +6,6 @@ Integrates enhanced RL, multi-modal fusion, and GPU optimization
Port: 8009
"""
import logging
import uuid
from datetime import datetime
from typing import Any
@@ -17,7 +16,9 @@ from fastapi import BackgroundTasks, FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel, Field
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
from .advanced_learning import AdvancedLearningService
from .advanced_reinforcement_learning import AdvancedReinforcementLearningEngine

View File

@@ -5,7 +5,6 @@ Real-time analytics dashboard, market insights, and performance metrics
"""
import asyncio
import logging
from collections import defaultdict, deque
from dataclasses import dataclass, field
from datetime import datetime
@@ -15,9 +14,9 @@ from typing import Any
import numpy as np
import pandas as pd
# Setup logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
class MetricType(StrEnum):

View File

@@ -4,9 +4,10 @@ Implements meta-learning, federated learning, and continuous model improvement
"""
import asyncio
import logging
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
import json
from dataclasses import asdict, dataclass
from datetime import datetime

View File

@@ -5,7 +5,6 @@ Phase 5.1: Advanced AI Capabilities Enhancement
"""
import asyncio
import logging
from datetime import datetime
from typing import Any
from uuid import uuid4
@@ -15,7 +14,9 @@ import torch
import torch.nn as nn
import torch.optim as optim
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
from sqlmodel import Session, select

View File

@@ -4,9 +4,10 @@ Implements secure agent-to-agent messaging with reputation-based access control
"""
import asyncio
import logging
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
import hashlib
import json
from dataclasses import asdict, dataclass, field

View File

@@ -3,9 +3,9 @@ Agent Integration and Deployment Framework for Verifiable AI Agent Orchestration
Integrates agent orchestration with existing ML ZK proof system and provides deployment tools
"""
import logging
from aitbc import get_logger
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
from datetime import datetime
from enum import StrEnum
from typing import Any

View File

@@ -4,9 +4,10 @@ Implements multi-agent coordination and sub-task management
"""
import asyncio
import logging
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
from dataclasses import dataclass, field
from datetime import datetime, timedelta
from enum import StrEnum

View File

@@ -4,12 +4,13 @@ Implements meta-learning, resource optimization, and performance enhancement for
"""
import asyncio
import logging
from datetime import datetime
from typing import Any
from uuid import uuid4
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
from sqlmodel import Session, select

View File

@@ -7,9 +7,9 @@ Provides portfolio creation, rebalancing, risk assessment, and trading strategy
from __future__ import annotations
import logging
from datetime import datetime, timedelta
from aitbc import get_logger
from fastapi import HTTPException
from sqlalchemy import select
from sqlmodel import Session

View File

@@ -5,9 +5,10 @@ Implements comprehensive security, auditing, and trust establishment for agent e
import hashlib
import json
import logging
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
from datetime import datetime
from enum import StrEnum
from typing import Any

View File

@@ -4,9 +4,10 @@ Implements a sophisticated marketplace where agents can offer specialized servic
"""
import asyncio
import logging
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
import hashlib
import json
from dataclasses import dataclass, field

View File

@@ -5,7 +5,6 @@ Implements ML-based pattern recognition, behavioral analysis, and predictive ris
"""
import asyncio
import logging
import random
from collections import defaultdict
from dataclasses import dataclass, field
@@ -16,8 +15,7 @@ from typing import Any
import numpy as np
import pandas as pd
# Setup logging
logging.basicConfig(level=logging.INFO)
from aitbc import get_logger
logger = logging.getLogger(__name__)

View File

@@ -5,7 +5,6 @@ Implements AI-powered trading algorithms, predictive analytics, and portfolio op
"""
import asyncio
import logging
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from datetime import datetime, timedelta
@@ -15,9 +14,9 @@ from typing import Any
import numpy as np
import pandas as pd
# Setup logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
class TradingStrategy(StrEnum):

View File

@@ -7,9 +7,9 @@ Provides liquidity pool management, token swapping, and dynamic fee adjustment.
from __future__ import annotations
import logging
from datetime import datetime, timedelta
from aitbc import get_logger
from fastapi import HTTPException
from sqlalchemy import select
from sqlmodel import Session

View File

@@ -7,10 +7,10 @@ Service for managing trustless cross-chain atomic swaps between agents.
from __future__ import annotations
import hashlib
import logging
import secrets
from datetime import datetime, timedelta
from aitbc import get_logger
from fastapi import HTTPException
from sqlmodel import Session, select

View File

@@ -4,9 +4,10 @@ Implements intelligent bidding algorithms for GPU rental negotiations
"""
import asyncio
import logging
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
from dataclasses import asdict, dataclass
from datetime import datetime, timedelta
from enum import StrEnum

View File

@@ -5,12 +5,13 @@ Implements certification framework, partnership programs, and badge system
import hashlib
import json
import logging
from datetime import datetime, timedelta
from typing import Any
from uuid import uuid4
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
from sqlmodel import Session, and_, select

View File

@@ -3,13 +3,13 @@ Community and Developer Ecosystem Services
Services for managing OpenClaw developer tools, SDKs, and third-party solutions
"""
import logging
from datetime import datetime
from typing import Any
from aitbc import get_logger
from sqlmodel import Session, select
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
from uuid import uuid4
from ..domain.community import (

View File

@@ -3,14 +3,15 @@ Enterprise Compliance Engine - Phase 6.2 Implementation
GDPR, CCPA, SOC 2, and regulatory compliance automation
"""
import logging
from dataclasses import dataclass, field
from datetime import datetime, timedelta
from enum import StrEnum
from typing import Any
from uuid import uuid4
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
class ComplianceFramework(StrEnum):

View File

@@ -4,12 +4,13 @@ Implements advanced creativity enhancement systems and specialized AI capabiliti
"""
import asyncio
import logging
from datetime import datetime
from typing import Any
from uuid import uuid4
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
import random
from sqlmodel import Session, and_, select

View File

@@ -7,9 +7,9 @@ Enables bridging of assets between different blockchain networks.
from __future__ import annotations
import logging
from datetime import datetime, timedelta
from aitbc import get_logger
from fastapi import HTTPException
from sqlalchemy import select
from sqlmodel import Session

View File

@@ -5,7 +5,6 @@ Production-ready cross-chain bridge service with atomic swap protocol implementa
import asyncio
import hashlib
import logging
import secrets
from datetime import datetime, timedelta
from decimal import Decimal
@@ -13,7 +12,9 @@ from enum import StrEnum
from typing import Any
from uuid import uuid4
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
from sqlmodel import Session, func, select, update

View File

@@ -4,9 +4,10 @@ Implements portable reputation scores across multiple blockchain networks
"""
import asyncio
import logging
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
import json
from dataclasses import asdict, dataclass, field
from datetime import datetime, timedelta

View File

@@ -6,9 +6,9 @@ Service for managing multi-jurisdictional DAOs, regional councils, and global tr
from __future__ import annotations
import logging
from datetime import datetime, timedelta
from aitbc import get_logger
from fastapi import HTTPException
from sqlmodel import Session, select

View File

@@ -6,9 +6,9 @@ Service for managing the developer ecosystem, bounties, certifications, and regi
from __future__ import annotations
import logging
from datetime import datetime, timedelta
from aitbc import get_logger
from fastapi import HTTPException
from sqlmodel import Session, select

View File

@@ -6,14 +6,15 @@ Implements a scalable, fault-tolerant framework for distributed AI agent tasks a
import asyncio
import uuid
import time
import logging
import json
import hashlib
from typing import Dict, List, Optional, Any, Callable, Awaitable
from datetime import datetime
from enum import Enum
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
class TaskStatus(str, Enum):
PENDING = "pending"

View File

@@ -4,7 +4,6 @@ Implements sophisticated pricing algorithms based on real-time market conditions
"""
import asyncio
import logging
from dataclasses import dataclass, field
from datetime import datetime, timedelta
from enum import StrEnum
@@ -12,7 +11,9 @@ from typing import Any
import numpy as np
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
class PricingStrategy(StrEnum):

View File

@@ -4,7 +4,6 @@ Multi-tenant API routing and management for enterprise clients
Port: 8010
"""
import logging
import secrets
import time
from datetime import datetime, timedelta
@@ -18,7 +17,9 @@ from fastapi.middleware.cors import CORSMiddleware
from fastapi.security import HTTPBearer
from pydantic import BaseModel, Field
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
from ..domain.multitenant import Tenant, TenantApiKey, TenantQuota
from ..exceptions import QuotaExceededError, TenantError

View File

@@ -5,7 +5,6 @@ ERP, CRM, and business system connectors for enterprise clients
import asyncio
import json
import logging
import xml.etree.ElementTree as ET
from dataclasses import dataclass, field
from datetime import datetime, timedelta
@@ -16,7 +15,9 @@ from uuid import uuid4
import aiohttp
from pydantic import BaseModel, Field, validator
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)

View File

@@ -3,14 +3,15 @@ Advanced Load Balancing - Phase 6.4 Implementation
Intelligent traffic distribution with AI-powered auto-scaling and performance optimization
"""
import logging
import statistics
from dataclasses import dataclass, field
from datetime import datetime, timedelta
from enum import StrEnum
from typing import Any
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
class LoadBalancingAlgorithm(StrEnum):

View File

@@ -3,7 +3,6 @@ Enterprise Security Framework - Phase 6.2 Implementation
Zero-trust architecture with HSM integration and advanced security controls
"""
import logging
import secrets
from dataclasses import dataclass, field
from datetime import datetime, timedelta
@@ -15,7 +14,9 @@ import cryptography
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
class SecurityLevel(StrEnum):

View File

@@ -258,12 +258,9 @@ class ExplorerService:
"""Get transaction details by hash from blockchain RPC"""
rpc_base = settings.blockchain_rpc_url.rstrip("/")
try:
with httpx.Client(timeout=10.0) as client:
resp = client.get(f"{rpc_base}/rpc/tx/{tx_hash}")
if resp.status_code == 404:
return {"error": "Transaction not found", "hash": tx_hash}
resp.raise_for_status()
tx_data = resp.json()
client = AITBCHTTPClient(timeout=10.0)
try:
tx_data = client.get(f"{rpc_base}/rpc/tx/{tx_hash}")
# Map RPC schema to UI-compatible format
return {
@@ -277,6 +274,11 @@ class ExplorerService:
"status": "confirmed",
"raw": tx_data, # Include raw data for debugging
}
except NetworkError as e:
# Handle 404 or network errors
if "404" in str(e) or "not found" in str(e).lower():
return {"error": "Transaction not found", "hash": tx_hash}
return {"error": f"Failed to fetch transaction: {str(e)}", "hash": tx_hash}
except Exception as e:
print(f"Warning: Failed to fetch transaction {tx_hash} from RPC: {e}")
return {"error": f"Failed to fetch transaction: {str(e)}", "hash": tx_hash}

View File

@@ -6,9 +6,9 @@ Service for managing cross-agent knowledge sharing and collaborative model train
from __future__ import annotations
import logging
from datetime import datetime
from aitbc import get_logger
from fastapi import HTTPException
from sqlmodel import Session, select

View File

@@ -1,10 +1,11 @@
import logging
from abc import ABC, abstractmethod
from dataclasses import dataclass
import numpy as np
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
@dataclass

View File

@@ -5,7 +5,6 @@ Content delivery network optimization with edge computing and caching
import asyncio
import gzip
import logging
import time
import zlib
from dataclasses import dataclass, field
@@ -14,7 +13,9 @@ from enum import StrEnum
from typing import Any
from uuid import uuid4
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
class CDNProvider(StrEnum):

View File

@@ -9,12 +9,13 @@ Global Marketplace Services
Core services for global marketplace operations, multi-region support, and cross-chain integration
"""
import logging
from datetime import datetime, timedelta
from typing import Any
from uuid import uuid4
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
from sqlmodel import Session, select

View File

@@ -3,12 +3,13 @@ Global Marketplace Integration Service
Integration service that combines global marketplace operations with cross-chain capabilities
"""
import logging
from datetime import datetime, timedelta
from enum import StrEnum
from typing import Any
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
from sqlmodel import Session, select

View File

@@ -4,13 +4,13 @@ Implements the OpenClaw DAO, voting mechanisms, and proposal lifecycle
Enhanced with multi-jurisdictional support and regional governance
"""
import logging
from datetime import datetime, timedelta
from typing import Any
from aitbc import get_logger
from sqlmodel import Session, select
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
from ..domain.governance import (
DaoTreasury,

View File

@@ -9,12 +9,12 @@ Advanced GPU optimization for cross-modal attention mechanisms
Phase 5.2: System Optimization and Performance Enhancement
"""
import logging
from aitbc import get_logger
import torch
import torch.nn.functional as F
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
import time
from datetime import datetime
from typing import Any

View File

@@ -7,8 +7,8 @@ Service for offloading agent vector databases and knowledge graphs to IPFS/Filec
from __future__ import annotations
import hashlib
import logging
from aitbc import get_logger
from fastapi import HTTPException
from sqlmodel import Session, select
@@ -19,7 +19,7 @@ from ..schemas.decentralized_memory import MemoryNodeCreate
# In a real environment, this would use a library like ipfshttpclient or a service like Pinata/Web3.Storage
# For this implementation, we will mock the interactions to demonstrate the architecture.
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class IPFSAdapterService:

View File

@@ -4,9 +4,10 @@ Handles IPFS/Filecoin integration for persistent agent memory storage
"""
import asyncio
import logging
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
import gzip
import hashlib
import pickle

View File

@@ -6,7 +6,6 @@ Connects with actual KYC/AML service providers for compliance verification
import asyncio
import hashlib
import logging
from dataclasses import dataclass
from datetime import datetime, timedelta
from enum import StrEnum
@@ -14,9 +13,9 @@ from typing import Any
import aiohttp
# Setup logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
class KYCProvider(StrEnum):

View File

@@ -5,7 +5,6 @@ Collects real-time market data from various sources for pricing calculations
import asyncio
import json
import logging
from collections.abc import Callable
from dataclasses import dataclass, field
from datetime import datetime, timedelta
@@ -14,7 +13,9 @@ from typing import Any
import websockets
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
class DataSource(StrEnum):

View File

@@ -6,14 +6,15 @@ Implements advanced caching, indexing, and data optimization for the AITBC marke
import json
import time
import hashlib
import logging
from typing import Dict, List, Optional, Any, Union, Set
from collections import OrderedDict
from datetime import datetime
import redis.asyncio as redis
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
class LFU_LRU_Cache:
"""Hybrid Least-Frequently/Least-Recently Used Cache for in-memory optimization"""

View File

@@ -3,9 +3,9 @@ Enhanced Marketplace Service - Simplified Version for Deployment
Basic marketplace enhancement features compatible with existing domain models
"""
import logging
from aitbc import get_logger
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
from datetime import datetime, timedelta
from enum import StrEnum
from typing import Any

View File

@@ -7,7 +7,6 @@ import os
import sys
import time
import json
import logging
import asyncio
import numpy as np
from typing import Dict, List, Optional, Any, Tuple
@@ -15,6 +14,8 @@ from datetime import datetime
import threading
import multiprocessing
from aitbc import get_logger
# Try to import pycuda, fallback if not available
try:
import pycuda.driver as cuda

View File

@@ -5,12 +5,13 @@ Implements comprehensive real-time monitoring and analytics for the AITBC market
import time
import asyncio
import logging
from typing import Dict, List, Optional, Any, collections
from datetime import datetime, timedelta
import collections
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
class TimeSeriesData:
"""Efficient in-memory time series data structure for real-time metrics"""

View File

@@ -5,12 +5,13 @@ Implements predictive and reactive auto-scaling of marketplace resources based o
import time
import asyncio
import logging
from typing import Dict, List, Optional, Any, Tuple
from datetime import datetime, timedelta
import math
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
class ScalingPolicy:
"""Configuration for scaling behavior"""

View File

@@ -6,9 +6,10 @@ Handles memory lifecycle management, versioning, and optimization
"""
import asyncio
import logging
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
from dataclasses import dataclass
from datetime import datetime, timedelta
from enum import StrEnum

View File

@@ -9,9 +9,10 @@ Specialized optimization for text, image, audio, video, tabular, and graph data
"""
import asyncio
import logging
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
from datetime import datetime
from enum import StrEnum
from typing import Any

View File

@@ -4,7 +4,6 @@ Advanced transaction management system for cross-chain operations with routing,
"""
import asyncio
import logging
from collections import defaultdict
from datetime import datetime, timedelta
from decimal import Decimal
@@ -12,7 +11,9 @@ from enum import StrEnum
from typing import Any
from uuid import uuid4
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
from sqlmodel import Session

View File

@@ -4,17 +4,17 @@ Main entry point for multi-language services
"""
import asyncio
import logging
import os
from pathlib import Path
from typing import Any, Dict, Optional
from aitbc import get_logger
from .language_detector import LanguageDetector
from .quality_assurance import TranslationQualityChecker
from .translation_cache import TranslationCache
from .translation_engine import TranslationEngine
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class MultiLanguageService:

View File

@@ -3,18 +3,18 @@ Multi-Language Agent Communication Integration
Enhanced agent communication with translation support
"""
import logging
from dataclasses import asdict, dataclass
from datetime import datetime
from enum import Enum
from typing import Any
from aitbc import get_logger
from .language_detector import LanguageDetector
from .quality_assurance import TranslationQualityChecker
from .translation_cache import TranslationCache
from .translation_engine import TranslationEngine, TranslationRequest, TranslationResponse
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class MessageType(Enum):

View File

@@ -4,10 +4,10 @@ REST API endpoints for translation and language detection services
"""
import asyncio
import logging
from datetime import datetime
from typing import Any
from aitbc import get_logger
from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException
from fastapi.responses import JSONResponse
from pydantic import BaseModel, Field, validator
@@ -17,7 +17,7 @@ from .quality_assurance import TranslationQualityChecker
from .translation_cache import TranslationCache
from .translation_engine import TranslationEngine, TranslationRequest
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
# Pydantic models for API requests/responses

View File

@@ -4,7 +4,6 @@ Automatic language detection for multi-language support
"""
import asyncio
import logging
from dataclasses import dataclass
from enum import Enum
@@ -13,7 +12,9 @@ import langdetect
from langdetect.lang_detect_exception import LangDetectException
from polyglot.detect import Detector
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
class DetectionMethod(Enum):

View File

@@ -4,18 +4,18 @@ Multi-language support for marketplace listings and content
"""
import asyncio
import logging
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import Any
from aitbc import get_logger
from .language_detector import LanguageDetector
from .quality_assurance import TranslationQualityChecker
from .translation_cache import TranslationCache
from .translation_engine import TranslationEngine, TranslationRequest, TranslationResponse
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class ListingType(Enum):

View File

@@ -4,7 +4,6 @@ Quality assessment and validation for translation results
"""
import asyncio
import logging
import re
from collections import Counter
from dataclasses import dataclass
@@ -17,7 +16,9 @@ import spacy
from nltk.tokenize import sent_tokenize, word_tokenize
from nltk.translate.bleu_score import SmoothingFunction, sentence_bleu
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
class QualityMetric(Enum):

View File

@@ -5,7 +5,6 @@ Redis-based caching for translation results to improve performance
import hashlib
import json
import logging
import pickle
import time
from dataclasses import asdict, dataclass
@@ -14,10 +13,11 @@ from typing import Any
import redis.asyncio as redis
from redis.asyncio import Redis
from aitbc import get_logger
from ...services.secure_pickle import safe_loads
from .translation_engine import TranslationProvider, TranslationResponse
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
@dataclass

View File

@@ -5,7 +5,6 @@ Core translation orchestration service for AITBC platform
import asyncio
import hashlib
import logging
from abc import ABC, abstractmethod
from dataclasses import dataclass
from enum import Enum
@@ -14,7 +13,9 @@ import deepl
import google.cloud.translate_v2 as translate
import openai
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
class TranslationProvider(Enum):

View File

@@ -14,8 +14,9 @@ import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from aitbc import get_logger
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
from sqlmodel import Session, select

View File

@@ -16,7 +16,9 @@ from uuid import uuid4
import numpy as np
logger = logging.getLogger(__name__)
from aitbc import get_logger
logger = get_logger(__name__)
from .websocket_stream_manager import MessageType, StreamConfig, stream_manager

View File

@@ -4,15 +4,15 @@ Geographic load balancing, data residency compliance, and disaster recovery
"""
import asyncio
import logging
import time
from dataclasses import dataclass, field
from datetime import datetime, timedelta
from enum import StrEnum
from typing import Any
from uuid import uuid4
from aitbc import get_logger
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class RegionStatus(StrEnum):

View File

@@ -9,9 +9,9 @@ Advanced AI agent capabilities with unified multi-modal processing pipeline
"""
import asyncio
import logging
from aitbc import get_logger
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
from datetime import datetime
from enum import StrEnum
from typing import Any

View File

@@ -3,9 +3,9 @@ OpenClaw Enhanced Service - Simplified Version for Deployment
Basic OpenClaw integration features compatible with existing infrastructure
"""
import logging
from aitbc import get_logger
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
from datetime import datetime
from enum import StrEnum
from typing import Any

View File

@@ -12,8 +12,9 @@ from typing import Any
import psutil
import torch
from aitbc import get_logger
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
@dataclass

View File

@@ -1,8 +1,8 @@
from __future__ import annotations
import logging
from aitbc import get_logger
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
from datetime import datetime
from secrets import token_hex
from typing import Any

View File

@@ -5,7 +5,7 @@ Implements proper Ethereum cryptography and secure key storage
from __future__ import annotations
import logging
from aitbc import get_logger
from datetime import datetime
from sqlalchemy import select
@@ -23,7 +23,7 @@ from .wallet_crypto import (
verify_keypair_consistency,
)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class SecureWalletService:

View File

@@ -3,9 +3,9 @@ Task Decomposition Service for OpenClaw Autonomous Economics
Implements intelligent task splitting and sub-task management
"""
import logging
from aitbc import get_logger
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
from dataclasses import dataclass, field
from datetime import datetime
from enum import StrEnum

View File

@@ -5,17 +5,15 @@ Detects market manipulation, unusual trading patterns, and suspicious activities
"""
import asyncio
import logging
import numpy as np
from dataclasses import dataclass, field
from datetime import datetime, timedelta
from enum import StrEnum
from typing import Any
import numpy as np
from aitbc import get_logger
# Setup logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
logger = get_logger(__name__)
class AlertLevel(StrEnum):