refactor(coordinator-api,blockchain-explorer): add response caching and fix timestamp handling

- Add cached decorator to admin stats, job status, payment status, and marketplace stats endpoints
- Configure cache TTLs using get_cache_config for different endpoint types (1min job_list, 30s user_balance, marketplace_stats)
- Import cache_management router and include it in main app with /v1 prefix
- Fix blockchain-explorer formatTimestamp to handle both ISO string and Unix numeric timestamps via type detection
This commit is contained in:
oib
2026-02-28 21:50:25 +01:00
parent 93ffaf53de
commit 2d97783fb1
13 changed files with 1030 additions and 12 deletions

View File

@@ -13,6 +13,7 @@ from .marketplace_offers import router as marketplace_offers
from .payments import router as payments
from .web_vitals import router as web_vitals
from .edge_gpu import router as edge_gpu
from .cache_management import router as cache_management
# from .registry import router as registry
__all__ = [
@@ -29,5 +30,6 @@ __all__ = [
"payments",
"web_vitals",
"edge_gpu",
"cache_management",
"registry",
]

View File

@@ -6,6 +6,8 @@ from slowapi.util import get_remote_address
from ..deps import require_admin_key
from ..services import JobService, MinerService
from ..storage import SessionDep
from ..utils.cache import cached, get_cache_config
from ..config import settings
from aitbc.logging import get_logger
logger = get_logger(__name__)
@@ -14,7 +16,8 @@ router = APIRouter(prefix="/admin", tags=["admin"])
@router.get("/stats", summary="Get coordinator stats")
@limiter.limit("20/minute")
@limiter.limit(lambda: settings.rate_limit_admin_stats)
@cached(**get_cache_config("job_list")) # Cache admin stats for 1 minute
async def get_stats(
request: Request,
session: SessionDep,

View File

@@ -0,0 +1,111 @@
"""
Cache monitoring and management endpoints
"""
from typing import Optional

from fastapi import APIRouter, Depends, HTTPException, Request
from slowapi import Limiter
from slowapi.util import get_remote_address

from aitbc.logging import get_logger

from ..deps import require_admin_key
from ..utils.cache_management import get_cache_stats, clear_cache, warm_cache
from ..config import settings
logger = get_logger(__name__)
limiter = Limiter(key_func=get_remote_address)
router = APIRouter(prefix="/cache", tags=["cache-management"])
@router.get("/stats", summary="Get cache statistics")
@limiter.limit(lambda: settings.rate_limit_admin_stats)
async def get_cache_statistics(
    request: Request,
    admin_key: str = Depends(require_admin_key())
):
    """Return cache performance statistics plus an overall status flag.

    The raw stats are returned under ``cache_health``; ``status`` reduces
    the backend's ``health_status`` to ``healthy`` (excellent/good) or
    ``degraded`` (anything else).

    Raises:
        HTTPException: 500 when the stats backend cannot be queried.
    """
    try:
        # Keep the try body minimal: only the call that can plausibly fail.
        stats = get_cache_stats()
    except Exception as e:
        # logger.exception records the traceback; chain the cause so the
        # original error is not lost (B904).
        logger.exception("Failed to get cache stats: %s", e)
        raise HTTPException(status_code=500, detail="Failed to retrieve cache statistics") from e
    return {
        "cache_health": stats,
        "status": "healthy" if stats["health_status"] in ("excellent", "good") else "degraded",
    }
@router.post("/clear", summary="Clear cache entries")
@limiter.limit(lambda: settings.rate_limit_admin_stats)
async def clear_cache_entries(
    request: Request,
    pattern: Optional[str] = None,  # was `str = None` — mistyped optional query param
    admin_key: str = Depends(require_admin_key())
):
    """Clear cache entries — all of them, or only those matching *pattern*.

    Args:
        pattern: Optional key pattern; ``None`` clears the whole cache
            (semantics delegated to ``clear_cache``).

    Raises:
        HTTPException: 500 when the cache backend fails to clear.
    """
    try:
        result = clear_cache(pattern)
    except Exception as e:
        # Preserve traceback and chain the cause instead of swallowing it.
        logger.exception("Failed to clear cache: %s", e)
        raise HTTPException(status_code=500, detail="Failed to clear cache") from e
    # Audit log only after the operation actually succeeded.
    logger.info("Cache cleared by admin: pattern=%s, result=%s", pattern, result)
    return result
@router.post("/warm", summary="Warm up cache")
@limiter.limit(lambda: settings.rate_limit_admin_stats)
async def warm_up_cache(
    request: Request,
    admin_key: str = Depends(require_admin_key())
):
    """Trigger cache warming for common queries.

    Returns whatever ``warm_cache`` reports about the warming run.

    Raises:
        HTTPException: 500 when warming fails.
    """
    try:
        result = warm_cache()
    except Exception as e:
        # Keep the traceback and chain the original cause (B904).
        logger.exception("Failed to warm cache: %s", e)
        raise HTTPException(status_code=500, detail="Failed to warm cache") from e
    logger.info("Cache warming triggered by admin")
    return result
@router.get("/health", summary="Get cache health status")
@limiter.limit(lambda: settings.rate_limit_admin_stats)
async def cache_health_check(
    request: Request,
    admin_key: str = Depends(require_admin_key())
):
    """Return detailed cache health information.

    Combines high-level stats, the cache manager's detailed counters, and
    human-readable tuning recommendations.

    Raises:
        HTTPException: 500 when either stats source cannot be queried.
    """
    try:
        # Local import mirrors the original: avoids a module-level cycle
        # between cache utilities and the router — TODO confirm the cycle.
        from ..utils.cache import cache_manager

        stats = get_cache_stats()
        cache_data = cache_manager.get_stats()
    except Exception as e:
        # logger.exception keeps the traceback; chain the cause (B904).
        logger.exception("Failed to get cache health: %s", e)
        raise HTTPException(status_code=500, detail="Failed to retrieve cache health") from e
    return {
        "health": stats,
        "detailed_stats": cache_data,
        "recommendations": _get_cache_recommendations(stats),
    }
def _get_cache_recommendations(stats: dict) -> list:
"""Get cache performance recommendations"""
recommendations = []
hit_rate = stats["hit_rate_percent"]
total_entries = stats["total_entries"]
if hit_rate < 40:
recommendations.append("Low hit rate detected. Consider increasing cache TTL or warming cache more frequently.")
if total_entries > 10000:
recommendations.append("High number of cache entries. Consider implementing cache size limits or more aggressive cleanup.")
if hit_rate > 95:
recommendations.append("Very high hit rate. Cache TTL might be too long, consider reducing for fresher data.")
if not recommendations:
recommendations.append("Cache performance is optimal.")
return recommendations

View File

@@ -9,6 +9,7 @@ from ..services import JobService
from ..services.payments import PaymentService
from ..config import settings
from ..storage import SessionDep
from ..utils.cache import cached, get_cache_config
limiter = Limiter(key_func=get_remote_address)
router = APIRouter(tags=["client"])
@@ -44,6 +45,7 @@ async def submit_job(
@router.get("/jobs/{job_id}", response_model=JobView, summary="Get job status")
@cached(**get_cache_config("job_list")) # Cache job status for 1 minute
async def get_job(
job_id: str,
session: SessionDep,

View File

@@ -25,6 +25,8 @@ from ..schemas import (
WalletInfoResponse
)
from ..services.bitcoin_wallet import get_wallet_balance, get_wallet_info
from ..utils.cache import cached, get_cache_config
from ..config import settings
router = APIRouter(tags=["exchange"])
@@ -85,6 +87,7 @@ async def create_payment(
@router.get("/exchange/payment-status/{payment_id}", response_model=PaymentStatusResponse)
@cached(**get_cache_config("user_balance")) # Cache payment status for 30 seconds
async def get_payment_status(payment_id: str) -> Dict[str, Any]:
"""Get payment status"""

View File

@@ -9,6 +9,8 @@ from ..schemas import MarketplaceBidRequest, MarketplaceOfferView, MarketplaceSt
from ..services import MarketplaceService
from ..storage import SessionDep
from ..metrics import marketplace_requests_total, marketplace_errors_total
from ..utils.cache import cached, get_cache_config
from ..config import settings
from aitbc.logging import get_logger
logger = get_logger(__name__)
@@ -51,7 +53,8 @@ async def list_marketplace_offers(
response_model=MarketplaceStatsView,
summary="Get marketplace summary statistics",
)
@limiter.limit("50/minute")
@limiter.limit(lambda: settings.rate_limit_marketplace_stats)
@cached(**get_cache_config("marketplace_stats"))
async def get_marketplace_stats(
request: Request,
*,