chore: remove obsolete files and add Solidity build artifacts to .gitignore

- Add ignore patterns for Solidity build artifacts (typechain-types, artifacts, cache)
- Remove unused exchange mock API server (api/exchange_mock_api.py)
- Remove obsolete client-web README placeholder
- Remove deprecated marketplace-ui HTML implementation
This commit is contained in:
oib
2026-01-24 15:46:23 +01:00
parent 9b9c5beb23
commit 55ced77928
195 changed files with 951 additions and 30090 deletions

5
.gitignore vendored
View File

@@ -133,6 +133,11 @@ apps/coordinator-api/src/*.db
# Explorer build artifacts
apps/explorer-web/dist/
# Solidity build artifacts
packages/solidity/aitbc-token/typechain-types/
packages/solidity/aitbc-token/artifacts/
packages/solidity/aitbc-token/cache/
# Local test data
tests/fixtures/generated/

View File

@@ -1,107 +0,0 @@
#!/usr/bin/env python3
import json
from http.server import BaseHTTPRequestHandler, HTTPServer
from urllib.parse import urlparse
class Handler(BaseHTTPRequestHandler):
    """Mock exchange API handler serving canned JSON fixtures with open CORS.

    All routes return static demo data; unknown paths get a 404 JSON body.
    """

    # CORS header trio attached to every response, including OPTIONS preflight.
    _CORS_HEADERS = (
        ("Access-Control-Allow-Origin", "*"),
        ("Access-Control-Allow-Methods", "GET, POST, OPTIONS"),
        ("Access-Control-Allow-Headers", "Content-Type, X-Api-Key"),
    )

    def _send_cors(self):
        """Emit the shared CORS headers."""
        for name, value in self._CORS_HEADERS:
            self.send_header(name, value)

    def _json(self, payload, status=200):
        """Serialize *payload* as JSON and write a complete response."""
        body = json.dumps(payload).encode("utf-8")
        self.send_response(status)
        self.send_header("Content-Type", "application/json")
        self._send_cors()
        self.send_header("Content-Length", str(len(body)))
        self.end_headers()
        self.wfile.write(body)

    def do_OPTIONS(self):
        """Answer CORS preflight with 204 No Content."""
        self.send_response(204)
        self._send_cors()
        self.end_headers()

    def do_GET(self):
        """Serve canned fixtures for the known read-only endpoints."""
        route = urlparse(self.path).path
        fixture = self._get_fixture(route)
        if fixture is not None:
            self._json(fixture)
        else:
            self._json({"detail": "Not Found"}, status=404)

    @staticmethod
    def _get_fixture(route):
        """Return the demo payload for *route*, or None when unrouted."""
        if route == "/api/trades/recent":
            return [
                {"id": 1, "price": 0.00001, "amount": 1500, "created_at": "2026-01-21T17:00:00Z"},
                {"id": 2, "price": 0.0000095, "amount": 500, "created_at": "2026-01-21T16:55:00Z"},
            ]
        if route == "/api/orders/orderbook":
            return {
                "sells": [{"price": 0.00001, "remaining": 1500, "amount": 1500}],
                "buys": [{"price": 0.000009, "remaining": 1000, "amount": 1000}],
            }
        if route == "/api/wallet/balance":
            return {"balance": 1000, "currency": "AITBC"}
        if route == "/api/treasury-balance":
            return {
                "balance": 50000,
                "currency": "AITBC",
                "usd_value": 5000.00,
                "last_updated": "2026-01-21T18:00:00Z",
            }
        if route == "/api/exchange/wallet/info":
            return {
                "address": "aitbc1exchange123456789",
                "balance": 1000,
                "currency": "AITBC",
                "total_transactions": 150,
                "status": "active",
                "transactions": [
                    {
                        "id": "txn_001",
                        "type": "deposit",
                        "amount": 500,
                        "timestamp": "2026-01-21T17:00:00Z",
                        "status": "completed",
                    },
                    {
                        "id": "txn_002",
                        "type": "withdrawal",
                        "amount": 200,
                        "timestamp": "2026-01-21T16:30:00Z",
                        "status": "completed",
                    },
                    {
                        "id": "txn_003",
                        "type": "trade",
                        "amount": 100,
                        "timestamp": "2026-01-21T16:00:00Z",
                        "status": "completed",
                    },
                ],
            }
        return None

    def do_POST(self):
        """Handle the single demo mutation endpoint (wallet connect)."""
        route = urlparse(self.path).path
        if route == "/api/wallet/connect":
            self._json({
                "success": True,
                "address": "aitbc1wallet123456789",
                "message": "Wallet connected successfully",
            })
            return
        self._json({"detail": "Not Found"}, status=404)
def main():
    """Serve the mock exchange API on localhost:8085 until interrupted."""
    server = HTTPServer(("127.0.0.1", 8085), Handler)
    server.serve_forever()


if __name__ == "__main__":
    main()

View File

@@ -1,9 +0,0 @@
# Client Web
## Purpose & Scope
Front-end application that allows users to submit compute jobs, monitor status, and interact with AITBC services. See `docs/bootstrap/dirs.md` and `docs/bootstrap/examples.md` for guidance.
## Development Setup
Implementation pending. Recommended stack: lightweight web framework (per bootstrap doc) without heavy front-end frameworks.

View File

@@ -1,491 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>AITBC Marketplace - GPU Compute Trading</title>
<base href="/Marketplace/">
<link rel="stylesheet" href="/assets/css/aitbc.css">
<script src="/assets/js/axios.min.js"></script>
<script src="/assets/js/lucide.js"></script>
<style>
.gradient-bg {
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
}
.card-hover {
transition: all 0.3s ease;
}
.card-hover:hover {
transform: translateY(-4px);
box-shadow: 0 20px 25px -5px rgba(0, 0, 0, 0.1), 0 10px 10px -5px rgba(0, 0, 0, 0.04);
}
</style>
</head>
<body class="bg-gray-50 dark:bg-gray-900 transition-colors duration-300">
<!-- Header -->
<header class="gradient-bg text-white shadow-lg">
<div class="container mx-auto px-4 py-6">
<div class="flex items-center justify-between">
<div class="flex items-center space-x-3">
<i data-lucide="cpu" class="w-8 h-8"></i>
<h1 class="text-2xl font-bold">AITBC Marketplace</h1>
</div>
<nav class="flex items-center space-x-6">
<button onclick="showSection('marketplace')" class="hover:text-purple-200 transition">Marketplace</button>
<button onclick="showSection('register')" class="hover:text-purple-200 transition">Register GPU</button>
<button onclick="showSection('my-bids')" class="hover:text-purple-200 transition">My Listings</button>
<button onclick="toggleDarkMode()" class="hover:text-purple-200 transition" title="Toggle dark mode">
<i data-lucide="moon" class="w-5 h-5" id="darkModeIcon"></i>
</button>
<button onclick="connectWallet()" class="bg-white text-purple-600 px-4 py-2 rounded-lg hover:bg-purple-100 transition">
<i data-lucide="wallet" class="w-4 h-4 inline mr-2"></i>Connect Wallet
</button>
</nav>
</div>
</div>
</header>
<!-- Main Content -->
<main class="container mx-auto px-4 py-8">
<!-- Stats Section -->
<section class="grid grid-cols-1 md:grid-cols-4 gap-6 mb-8">
<div class="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<div class="flex items-center justify-between">
<div>
<p class="text-gray-500 dark:text-gray-400 text-sm">Active Bids</p>
<p class="text-2xl font-bold text-gray-900 dark:text-white" id="activeBids">0</p>
</div>
<i data-lucide="trending-up" class="w-8 h-8 text-purple-500"></i>
</div>
</div>
<div class="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<div class="flex items-center justify-between">
<div>
<p class="text-gray-500 text-sm">Total Capacity</p>
<p class="text-2xl font-bold" id="totalCapacity">0 GPUs</p>
</div>
<i data-lucide="server" class="w-8 h-8 text-blue-500"></i>
</div>
</div>
<div class="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<div class="flex items-center justify-between">
<div>
<p class="text-gray-500 text-sm">Avg Price</p>
<p class="text-2xl font-bold" id="avgPrice">$0.00</p>
</div>
<i data-lucide="dollar-sign" class="w-8 h-8 text-green-500"></i>
</div>
</div>
<div class="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<div class="flex items-center justify-between">
<div>
<p class="text-gray-500 text-sm">Your Balance</p>
<p class="text-2xl font-bold" id="walletBalance">0 AITBC</p>
</div>
<i data-lucide="coins" class="w-8 h-8 text-yellow-500"></i>
</div>
</div>
</section>
<!-- Marketplace Section -->
<section id="marketplaceSection" class="section">
<div class="flex justify-between items-center mb-6">
<h2 class="text-2xl font-bold">Available GPU Compute</h2>
<div class="flex space-x-4">
<select class="border rounded-lg px-4 py-2" id="sortSelect">
<option value="price">Sort by Price</option>
<option value="capacity">Sort by Capacity</option>
<option value="memory">Sort by Memory</option>
</select>
<button onclick="refreshMarketplace()" class="bg-purple-600 text-white px-4 py-2 rounded-lg hover:bg-purple-700 transition">
<i data-lucide="refresh-cw" class="w-4 h-4 inline mr-2"></i>Refresh
</button>
</div>
</div>
<div id="marketplaceList" class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
<!-- GPU cards will be inserted here -->
</div>
</section>
<!-- Register GPU Section -->
<section id="registerSection" class="section hidden">
<div class="max-w-2xl mx-auto">
<h2 class="text-2xl font-bold mb-6">Register Your GPU</h2>
<div class="bg-white rounded-lg shadow-lg p-8">
<form id="gpuRegisterForm" class="space-y-6">
<div>
<label class="block text-sm font-medium text-gray-700 mb-2">GPU Model</label>
<input type="text" id="gpuModel" class="w-full border rounded-lg px-4 py-2" placeholder="e.g., NVIDIA RTX 4060 Ti" required>
</div>
<div class="grid grid-cols-2 gap-4">
<div>
<label class="block text-sm font-medium text-gray-700 mb-2">Memory (GB)</label>
<input type="number" id="gpuMemory" class="w-full border rounded-lg px-4 py-2" placeholder="16" required>
</div>
<div>
<label class="block text-sm font-medium text-gray-700 mb-2">Price per Hour ($)</label>
<input type="number" id="gpuPrice" step="0.01" class="w-full border rounded-lg px-4 py-2" placeholder="0.50" required>
</div>
</div>
<div>
<label class="block text-sm font-medium text-gray-700 mb-2">CUDA Version</label>
<select id="cudaVersion" class="w-full border rounded-lg px-4 py-2">
<option value="11.8">CUDA 11.8</option>
<option value="12.0">CUDA 12.0</option>
<option value="12.1">CUDA 12.1</option>
<option value="12.2">CUDA 12.2</option>
<option value="12.3">CUDA 12.3</option>
<option value="12.4" selected>CUDA 12.4</option>
</select>
</div>
<div>
<label class="block text-sm font-medium text-gray-700 mb-2">Supported Models</label>
<div class="space-y-2">
<label class="flex items-center">
<input type="checkbox" value="stable-diffusion" class="mr-2" checked>
<span>Stable Diffusion</span>
</label>
<label class="flex items-center">
<input type="checkbox" value="llama2-7b" class="mr-2" checked>
<span>LLaMA-2 7B</span>
</label>
<label class="flex items-center">
<input type="checkbox" value="llama2-13b" class="mr-2">
<span>LLaMA-2 13B</span>
</label>
<label class="flex items-center">
<input type="checkbox" value="whisper" class="mr-2" checked>
<span>Whisper</span>
</label>
<label class="flex items-center">
<input type="checkbox" value="clip" class="mr-2" checked>
<span>CLIP</span>
</label>
</div>
</div>
<div>
<label class="block text-sm font-medium text-gray-700 mb-2">Additional Notes</label>
<textarea id="gpuNotes" rows="3" class="w-full border rounded-lg px-4 py-2" placeholder="Any additional information about your GPU setup..."></textarea>
</div>
<button type="submit" class="w-full bg-purple-600 text-white py-3 rounded-lg hover:bg-purple-700 transition font-semibold">
Register GPU
</button>
</form>
</div>
</div>
</section>
<!-- My Bids Section -->
<section id="myBidsSection" class="section hidden">
<h2 class="text-2xl font-bold mb-6">My GPU Listings</h2>
<div id="myBidsList" class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
<!-- Your listings will appear here -->
</div>
</section>
</main>
<!-- Toast Notification -->
<div id="toast" class="fixed bottom-4 right-4 bg-green-500 text-white px-6 py-3 rounded-lg shadow-lg transform translate-y-full transition-transform duration-300">
<span id="toastMessage"></span>
</div>
<script>
// API Configuration — both endpoints are served same-origin behind the app.
const API_BASE = window.location.origin + '/api';
const BLOCKCHAIN_API = window.location.origin + '/rpc';
// Demo wallet state: populated by connectWallet(), read by purchaseGPU()
// and updateWalletBalance().
let walletAddress = null;
let connectedWallet = null;
// Initialize: render icons, load data, and wire the registration form.
document.addEventListener('DOMContentLoaded', () => {
    lucide.createIcons();
    loadMarketplaceStats();
    loadMarketplaceBids();
    // Form submission
    document.getElementById('gpuRegisterForm').addEventListener('submit', registerGPU);
    // Check for saved dark mode preference; when the user has made no
    // explicit choice, fall back to the OS-level colour scheme.
    if (localStorage.getItem('darkMode') === 'true' ||
        (!localStorage.getItem('darkMode') && window.matchMedia('(prefers-color-scheme: dark)').matches)) {
        document.documentElement.classList.add('dark');
        updateDarkModeIcon(true);
    }
});
// Dark mode toggle
function toggleDarkMode() {
    // classList.toggle reports the new state after flipping the class.
    const enabled = document.documentElement.classList.toggle('dark');
    // Persist the choice so the preference survives reloads.
    localStorage.setItem('darkMode', enabled);
    updateDarkModeIcon(enabled);
}
function updateDarkModeIcon(isDark) {
    // Swap the toggle button's glyph: sun while dark mode is on, moon otherwise.
    const glyph = isDark ? 'sun' : 'moon';
    document.getElementById('darkModeIcon').setAttribute('data-lucide', glyph);
    // Re-render lucide icons so the new data-lucide attribute takes effect.
    lucide.createIcons();
}
// Section Navigation
function showSection(section) {
    // Section element ids are camelCase ("myBidsSection") while the nav
    // buttons pass kebab-case keys ("my-bids"); the old concatenation looked
    // up "my-bidsSection", found nothing, and threw on .classList. Convert
    // the key so the lookup actually resolves.
    const key = section.replace(/-([a-z])/g, (_, c) => c.toUpperCase());
    document.querySelectorAll('.section').forEach(s => s.classList.add('hidden'));
    const target = document.getElementById(key + 'Section');
    if (target) target.classList.remove('hidden');
    if (section === 'my-bids') {
        loadMyBids();
    }
}
// Connect Wallet
async function connectWallet() {
    // Demo only: fabricate a wallet locally instead of talking to a signer.
    // slice() replaces the deprecated substr(); padEnd guards against the
    // random suffix coming up short (toString(36) can drop trailing digits),
    // which would have made 'x'.repeat(negative) throw in the old code.
    const walletId = 'wallet-' + Math.random().toString(36).slice(2, 11);
    const address = ('aitbc1' + walletId).padEnd(46, 'x');
    connectedWallet = {
        id: walletId,
        address: address,
        publicKey: '0x' + Array(64).fill(0).map(() => Math.floor(Math.random() * 16).toString(16)).join('')
    };
    walletAddress = address;
    showToast('Wallet connected: ' + address.substring(0, 20) + '...');
    updateWalletBalance();
}
// Load Marketplace Stats
async function loadMarketplaceStats() {
    try {
        // Pull the aggregate numbers and push them straight into the stat cards.
        const { data } = await axios.get(`${API_BASE}/marketplace/stats`);
        document.getElementById('activeBids').textContent = data.activeBids;
        document.getElementById('totalCapacity').textContent = data.openCapacity + ' GPUs';
        document.getElementById('avgPrice').textContent = '$' + data.averagePrice.toFixed(2);
    } catch (error) {
        console.error('Failed to load stats:', error);
    }
}
// Load Marketplace Bids
async function loadMarketplaceBids() {
    try {
        const { data } = await axios.get(`${API_BASE}/marketplace/offers`);
        displayMarketplaceBids(data);
    } catch (error) {
        console.error('Failed to load bids:', error);
        // Fall back to canned fixtures so the UI stays demonstrable offline.
        displayDemoBids();
    }
}
// Display Marketplace Bids
function displayMarketplaceBids(bids) {
    // HTML-escape API-sourced values before interpolating them into
    // innerHTML: provider/model/notes come from the marketplace API and
    // previously went in raw, allowing markup/script injection (XSS).
    const esc = (v) => String(v).replace(/[&<>"']/g, (ch) => ({
        '&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', "'": '&#39;'
    })[ch]);
    const container = document.getElementById('marketplaceList');
    if (bids.length === 0) {
        container.innerHTML = '<div class="col-span-full text-center py-12 text-gray-500">No GPU offers available at the moment.</div>';
        return;
    }
    container.innerHTML = bids.map(bid => `
        <div class="bg-white rounded-lg shadow-lg p-6 card-hover">
            <div class="flex justify-between items-start mb-4">
                <h3 class="text-lg font-semibold">${esc(bid.provider)}</h3>
                <span class="bg-green-100 text-green-800 px-2 py-1 rounded text-sm">Available</span>
            </div>
            <div class="space-y-2 text-sm text-gray-600 mb-4">
                <p><i data-lucide="monitor" class="w-4 h-4 inline mr-1"></i>GPU: ${esc(bid.gpu_model || 'Not specified')}</p>
                <p><i data-lucide="hard-drive" class="w-4 h-4 inline mr-1"></i>Memory: ${esc(bid.gpu_memory_gb || 'N/A')} GB</p>
                <p><i data-lucide="clock" class="w-4 h-4 inline mr-1"></i>Capacity: ${esc(bid.capacity || 1)} GPU(s)</p>
            </div>
            <div class="flex justify-between items-center">
                <span class="text-2xl font-bold text-purple-600">$${esc(bid.price || '0.50')}/hr</span>
                <button onclick="purchaseGPU('${esc(bid.id)}')" class="bg-purple-600 text-white px-4 py-2 rounded hover:bg-purple-700 transition">
                    Purchase
                </button>
            </div>
            ${bid.notes ? `<p class="mt-4 text-sm text-gray-500">${esc(bid.notes)}</p>` : ''}
        </div>
    `).join('');
    // Re-render lucide icons for the freshly inserted markup.
    lucide.createIcons();
}
// Display Demo Bids (for testing)
function displayDemoBids() {
    // Single canned offer rendered when the marketplace API is unreachable.
    displayMarketplaceBids([{
        id: 'demo1',
        provider: 'REDACTED_MINER_KEY',
        gpu_model: 'NVIDIA RTX 4060 Ti',
        gpu_memory_gb: 16,
        capacity: 1,
        price: 0.50,
        notes: 'NVIDIA RTX 4060 Ti 16GB - Available for AI workloads'
    }]);
}
// Register GPU
// Handles the registration form submit: registers the host as a miner,
// then publishes a marketplace bid for it.
async function registerGPU(e) {
    e.preventDefault();
    const form = document.getElementById('gpuRegisterForm');
    const gpuModel = document.getElementById('gpuModel').value;
    const gpuMemory = document.getElementById('gpuMemory').value;
    const gpuPrice = document.getElementById('gpuPrice').value;
    const cudaVersion = document.getElementById('cudaVersion').value;
    const gpuNotes = document.getElementById('gpuNotes').value;
    // Scope the query to the registration form: the previous page-wide
    // selector ('input[type="checkbox"]:checked' on document) would also
    // collect unrelated checkboxes from other sections of the page.
    const supportedModels = [];
    form.querySelectorAll('input[type="checkbox"]:checked').forEach(cb => {
        supportedModels.push(cb.value);
    });
    try {
        // First register as miner
        await axios.post(`${API_BASE}/miners/register`, {
            capabilities: {
                gpu: gpuModel,
                gpu_memory_gb: parseInt(gpuMemory, 10),
                cuda_version: cudaVersion,
                supported_models: supportedModels,
                region: 'local',
                pricing_per_hour: parseFloat(gpuPrice)
            }
        }, {
            headers: { 'X-Api-Key': 'REDACTED_MINER_KEY' }
        });
        // Then create marketplace bid
        await axios.post(`${API_BASE}/marketplace/bids`, {
            provider: 'REDACTED_MINER_KEY',
            capacity: 1,
            price: parseFloat(gpuPrice),
            notes: `${gpuModel} ${gpuMemory}GB - ${supportedModels.join(', ')}${gpuNotes ? '. ' + gpuNotes : ''}`
        }, {
            headers: { 'X-Api-Key': 'REDACTED_CLIENT_KEY' }
        });
        showToast('GPU registered successfully!');
        form.reset();
        loadMarketplaceStats();
        loadMarketplaceBids();
    } catch (error) {
        console.error('Registration failed:', error);
        showToast('Registration failed. Please try again.', 'error');
    }
}
// Purchase GPU
async function purchaseGPU(bidId) {
    // A connected wallet is required before any purchase can be attempted.
    if (!walletAddress) {
        showToast('Please connect your wallet first', 'error');
        return;
    }
    // NOTE(review): bidId is not sent to the API — the demo submits a generic
    // inference job instead of targeting the selected offer; confirm intent.
    const jobRequest = {
        job_type: 'inference',
        model: 'stable-diffusion',
        requirements: {
            gpu_memory_min_gb: 8,
            cuda_version_min: '11.0'
        },
        pricing: {
            max_price_per_hour: 1.0,
            duration_hours: 1
        }
    };
    try {
        await axios.post(`${API_BASE}/jobs`, jobRequest, {
            headers: { 'X-Api-Key': 'REDACTED_CLIENT_KEY' }
        });
        showToast('GPU time purchased successfully!');
        updateWalletBalance();
    } catch (error) {
        console.error('Purchase failed:', error);
        showToast('Purchase failed. Please try again.', 'error');
    }
}
// Load My Bids
function loadMyBids() {
    // Demo data: render one static listing for the registered GPU.
    const container = document.getElementById('myBidsList');
    container.innerHTML = `
        <div class="bg-white rounded-lg shadow-lg p-6">
            <div class="flex justify-between items-start mb-4">
                <h3 class="text-lg font-semibold">NVIDIA RTX 4060 Ti</h3>
                <span class="bg-green-100 text-green-800 px-2 py-1 rounded text-sm">Active</span>
            </div>
            <div class="space-y-2 text-sm text-gray-600 mb-4">
                <p><i data-lucide="monitor" class="w-4 h-4 inline mr-1"></i>Memory: 16 GB</p>
                <p><i data-lucide="clock" class="w-4 h-4 inline mr-1"></i>Price: $0.50/hr</p>
                <p><i data-lucide="activity" class="w-4 h-4 inline mr-1"></i>Status: Available</p>
            </div>
            <div class="flex space-x-2">
                <button class="flex-1 bg-blue-600 text-white px-3 py-2 rounded hover:bg-blue-700 transition text-sm">
                    Edit
                </button>
                <button class="flex-1 bg-red-600 text-white px-3 py-2 rounded hover:bg-red-700 transition text-sm">
                    Remove
                </button>
            </div>
        </div>
    `;
    // Re-render lucide icons for the injected markup.
    lucide.createIcons();
}
// Update Wallet Balance
async function updateWalletBalance() {
    if (!walletAddress) return;
    const el = document.getElementById('walletBalance');
    try {
        const { data } = await axios.get(`${BLOCKCHAIN_API}/getBalance/${walletAddress}`);
        el.textContent = data.balance + ' AITBC';
    } catch (error) {
        el.textContent = '1000 AITBC'; // Demo balance
    }
}
// Refresh Marketplace
function refreshMarketplace() {
    // Re-fetch both the aggregate stats and the offer list, then confirm.
    loadMarketplaceStats();
    loadMarketplaceBids();
    showToast('Marketplace refreshed');
}
// Toast Notification
function showToast(message, type = 'success') {
    const toast = document.getElementById('toast');
    document.getElementById('toastMessage').textContent = message;
    // Rebuild the class list so the colour matches the message severity.
    const colour = type === 'error' ? 'bg-red-500' : 'bg-green-500';
    toast.className = `fixed bottom-4 right-4 px-6 py-3 rounded-lg shadow-lg transform transition-transform duration-300 ${colour} text-white`;
    // Slide in, then slide back out after three seconds.
    toast.style.transform = 'translateY(0)';
    setTimeout(() => {
        toast.style.transform = 'translateY(100%)';
    }, 3000);
}
</script>
</body>
</html>

View File

@@ -1,53 +0,0 @@
#!/usr/bin/env python3
"""
Simple HTTP server for the AITBC Marketplace UI
"""
import os
import sys
from http.server import HTTPServer, SimpleHTTPRequestHandler
import argparse
class CORSHTTPRequestHandler(SimpleHTTPRequestHandler):
    """Static file handler that tacks permissive CORS headers onto every reply."""

    # Header trio added to every response so the UI can be fetched cross-origin.
    _CORS = (
        ('Access-Control-Allow-Origin', '*'),
        ('Access-Control-Allow-Methods', 'GET, POST, OPTIONS'),
        ('Access-Control-Allow-Headers', 'Content-Type, X-Api-Key'),
    )

    def end_headers(self):
        """Inject the CORS headers before the header block is flushed."""
        for name, value in self._CORS:
            self.send_header(name, value)
        super().end_headers()

    def do_OPTIONS(self):
        """Answer CORS preflight with an empty 200 (headers via end_headers)."""
        self.send_response(200)
        self.end_headers()
def run_server(port=3000, directory=None):
    """Serve static files with CORS from *directory* on *port* until Ctrl+C.

    Note: changing into *directory* mutates the process-wide CWD, which is
    how SimpleHTTPRequestHandler picks its document root.
    """
    if directory:
        os.chdir(directory)
    httpd = HTTPServer(('', port), CORSHTTPRequestHandler)
    print(f"""
╔═══════════════════════════════════════╗
║ AITBC Marketplace UI Server ║
╠═══════════════════════════════════════╣
║ Server running at: ║
║ http://localhost:{port}
║ ║
║ Press Ctrl+C to stop ║
╚═══════════════════════════════════════╝
""")
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        # Graceful shutdown on Ctrl+C: release the listening socket.
        print("\nShutting down server...")
        httpd.server_close()
if __name__ == '__main__':
    # CLI entry point: parse options and launch the static file server.
    cli = argparse.ArgumentParser(description='Run the AITBC Marketplace UI server')
    cli.add_argument('--port', type=int, default=3000, help='Port to run the server on')
    cli.add_argument('--dir', type=str, default='.', help='Directory to serve from')
    opts = cli.parse_args()
    run_server(port=opts.port, directory=opts.dir)

View File

@@ -1,164 +0,0 @@
# AITBC Miner Dashboard
A real-time monitoring dashboard for GPU mining operations in the AITBC network.
## Features
### 🎯 GPU Monitoring
- Real-time GPU utilization
- Temperature monitoring
- Power consumption tracking
- Memory usage display
- Performance state indicators
### ⛏️ Mining Operations
- Active job tracking
- Job progress visualization
- Success/failure statistics
- Average job time metrics
### 📊 Performance Analytics
- GPU utilization charts (last hour)
- Hash rate performance tracking
- Mining statistics dashboard
- Service capability overview
### 🔧 Available Services
- GPU Computing (CUDA cores)
- Parallel Processing (multi-threaded)
- Hash Generation (proof-of-work)
- AI Model Training (ML operations)
- Blockchain Validation
- Data Processing
## Quick Start
### 1. Deploy the Dashboard
```bash
cd /home/oib/windsurf/aitbc/apps/miner-dashboard
sudo ./deploy.sh
```
### 2. Access the Dashboard
- Local: http://localhost:8080
- Remote: http://[SERVER_IP]:8080
### 3. Monitor Mining
- View real-time GPU status
- Track active mining jobs
- Monitor hash rates
- Check service availability
## Architecture
```
┌─────────────────┐ ┌──────────────────┐ ┌─────────────────┐
│ Web Browser │◄──►│ Dashboard Server │◄──►│ GPU Miner │
│ (Dashboard UI) │ │ (Port 8080) │ │ (Background) │
└─────────────────┘ └──────────────────┘ └─────────────────┘
┌─────────────────┐
│ nvidia-smi │
│ (GPU Metrics) │
└─────────────────┘
```
## API Endpoints
- `GET /api/gpu-status` - Real-time GPU metrics
- `GET /api/mining-jobs` - Active mining jobs
- `GET /api/statistics` - Mining statistics
- `GET /api/services` - Available services
## Service Management
### Start Services
```bash
sudo systemctl start aitbc-miner
sudo systemctl start aitbc-miner-dashboard
```
### Stop Services
```bash
sudo systemctl stop aitbc-miner
sudo systemctl stop aitbc-miner-dashboard
```
### View Logs
```bash
sudo journalctl -u aitbc-miner -f
sudo journalctl -u aitbc-miner-dashboard -f
```
## GPU Requirements
- NVIDIA GPU with CUDA support
- nvidia-smi utility installed
- GPU memory: 4GB+ recommended
- CUDA drivers up to date
## Troubleshooting
### Dashboard Not Loading
```bash
# Check service status
sudo systemctl status aitbc-miner-dashboard
# Check logs
sudo journalctl -u aitbc-miner-dashboard -n 50
```
### GPU Not Detected
```bash
# Verify nvidia-smi
nvidia-smi
# Check GPU permissions
ls -l /dev/nvidia*
```
### No Mining Jobs
```bash
# Check miner service
sudo systemctl status aitbc-miner
# Restart if needed
sudo systemctl restart aitbc-miner
```
## Configuration
### GPU Monitoring
The dashboard automatically detects NVIDIA GPUs using nvidia-smi.
### Mining Performance
Adjust mining parameters in `miner_service.py`:
- Job frequency
- Processing duration
- Success rates
### Dashboard Port
Change port in `dashboard_server.py` (default: 8080).
## Security
- Dashboard runs on localhost by default
- No external database required
- Minimal dependencies
- Read-only GPU monitoring
## Development
### Extend Services
Add new mining services in the `get_services()` method.
### Customize UI
Modify `index.html` to change the dashboard appearance.
### Add Metrics
Extend the API with new endpoints for additional metrics.
## License
AITBC Project - Internal Use Only

View File

@@ -1,15 +0,0 @@
# Systemd unit for the AITBC miner dashboard web UI (dashboard_server.py).
[Unit]
Description=AITBC Miner Dashboard
After=network.target
[Service]
Type=simple
# NOTE(review): runs as root — consider a dedicated unprivileged service
# user; confirm whether root is actually required for GPU metric access.
User=root
WorkingDirectory=/opt/aitbc-miner-dashboard
Environment=PYTHONPATH=/opt/aitbc-miner-dashboard
ExecStart=/opt/aitbc-miner-dashboard/.venv/bin/python dashboard_server.py
# Restart automatically on any exit, waiting 3 seconds between attempts.
Restart=always
RestartSec=3
[Install]
WantedBy=multi-user.target

View File

@@ -1,15 +0,0 @@
# Systemd unit for the background AITBC GPU mining worker (miner_service.py).
[Unit]
Description=AITBC GPU Mining Service
After=network.target
[Service]
Type=simple
# NOTE(review): runs as root — consider a dedicated unprivileged service
# user; confirm whether root is actually required for GPU access.
User=root
WorkingDirectory=/opt/aitbc-miner-dashboard
Environment=PYTHONPATH=/opt/aitbc-miner-dashboard
ExecStart=/opt/aitbc-miner-dashboard/.venv/bin/python miner_service.py
# Restart automatically on any exit, waiting 3 seconds between attempts.
Restart=always
RestartSec=3
[Install]
WantedBy=multi-user.target

View File

@@ -1,185 +0,0 @@
#!/usr/bin/env python3
"""AITBC Miner Dashboard API - Real-time GPU and mining status"""
from http.server import HTTPServer, BaseHTTPRequestHandler
import json
import subprocess
import psutil
from datetime import datetime, timedelta
import random
class MinerDashboardHandler(BaseHTTPRequestHandler):
    """HTTP handler exposing GPU metrics, job, and statistics endpoints plus the UI.

    Fixes over the previous revision: JSON responses carry Content-Length;
    a missing/hung nvidia-smi falls back to mock data instead of a 500;
    the dashboard file handle is closed via a context manager.
    """

    def send_json_response(self, data, status=200):
        """Send *data* as a JSON response with CORS and Content-Length set."""
        body = json.dumps(data, default=str).encode()
        self.send_response(status)
        self.send_header('Content-Type', 'application/json')
        self.send_header('Access-Control-Allow-Origin', '*')
        # Content-Length lets keep-alive clients know where the body ends.
        self.send_header('Content-Length', str(len(body)))
        self.end_headers()
        self.wfile.write(body)

    def do_GET(self):
        """Route GET requests to the matching API endpoint or the UI."""
        if self.path == '/api/gpu-status':
            self.get_gpu_status()
        elif self.path == '/api/mining-jobs':
            self.get_mining_jobs()
        elif self.path == '/api/statistics':
            self.get_statistics()
        elif self.path == '/api/services':
            self.get_services()
        elif self.path == '/' or self.path == '/index.html':
            self.serve_dashboard()
        else:
            self.send_error(404)

    def get_gpu_status(self):
        """Report live GPU metrics via nvidia-smi, or mock data if unavailable."""
        try:
            try:
                result = subprocess.run(
                    ['nvidia-smi',
                     '--query-gpu=utilization.gpu,temperature.gpu,power.draw,'
                     'memory.used,memory.total,performance_state',
                     '--format=csv,noheader,nounits'],
                    capture_output=True, text=True, timeout=5)
            except (FileNotFoundError, subprocess.TimeoutExpired):
                # Hosts without NVIDIA tooling (or a wedged driver) previously
                # surfaced as a 500; fall through to the mock payload instead.
                result = None
            if result is not None and result.returncode == 0:
                values = result.stdout.strip().split(', ')
                gpu_data = {
                    'utilization': int(values[0]),
                    'temperature': int(values[1]),
                    'power_usage': float(values[2]),
                    'memory_used': float(values[3]) / 1024,  # Convert MB to GB
                    'memory_total': float(values[4]) / 1024,
                    'performance_state': values[5],
                    'timestamp': datetime.now().isoformat()
                }
                self.send_json_response(gpu_data)
            else:
                # Fallback to mock data
                self.send_json_response({
                    'utilization': 0,
                    'temperature': 43,
                    'power_usage': 18,
                    'memory_used': 2.9,
                    'memory_total': 16,
                    'performance_state': 'P8',
                    'timestamp': datetime.now().isoformat()
                })
        except Exception as e:
            # Parse errors (unexpected nvidia-smi output) still report a 500.
            self.send_json_response({'error': str(e)}, 500)

    def get_mining_jobs(self):
        """Get active mining jobs (currently simulated with mock data)."""
        try:
            # TODO: connect to the miner service via socket or API.
            jobs = [
                {
                    'id': 'job_12345',
                    'name': 'Matrix Computation',
                    'progress': 85,
                    'status': 'running',
                    'started_at': (datetime.now() - timedelta(minutes=10)).isoformat(),
                    'estimated_completion': (datetime.now() + timedelta(minutes=2)).isoformat()
                },
                {
                    'id': 'job_12346',
                    'name': 'Hash Validation',
                    'progress': 42,
                    'status': 'running',
                    'started_at': (datetime.now() - timedelta(minutes=5)).isoformat(),
                    'estimated_completion': (datetime.now() + timedelta(minutes=7)).isoformat()
                }
            ]
            self.send_json_response(jobs)
        except Exception as e:
            self.send_json_response({'error': str(e)}, 500)

    def get_statistics(self):
        """Get mining statistics (randomized demo values)."""
        stats = {
            'total_jobs_completed': random.randint(1200, 1300),
            'average_job_time': round(random.uniform(10, 15), 1),
            'success_rate': round(random.uniform(95, 99), 1),
            'total_earned_btc': round(random.uniform(0.004, 0.005), 4),
            'total_earned_aitbc': random.randint(100, 200),
            'uptime_hours': 24,
            'hash_rate': round(random.uniform(45, 55), 1),  # MH/s
            'efficiency': round(random.uniform(0.8, 1.2), 2)  # W/MH
        }
        self.send_json_response(stats)

    def get_services(self):
        """Get the static catalogue of advertised mining services."""
        services = [
            {
                'name': 'GPU Computing',
                'description': 'CUDA cores available for computation',
                'status': 'active',
                'capacity': '100%',
                'utilization': 65
            },
            {
                'name': 'Parallel Processing',
                'description': 'Multi-threaded job execution',
                'status': 'active',
                'capacity': '8 threads',
                'utilization': 45
            },
            {
                'name': 'Hash Generation',
                'description': 'Proof-of-work computation',
                'status': 'standby',
                'capacity': '50 MH/s',
                'utilization': 0
            },
            {
                'name': 'AI Model Training',
                'description': 'Machine learning operations',
                'status': 'available',
                'capacity': '16GB VRAM',
                'utilization': 0
            },
            {
                'name': 'Blockchain Validation',
                'description': 'AITBC block validation',
                'status': 'active',
                'capacity': '1000 tx/s',
                'utilization': 23
            },
            {
                'name': 'Data Processing',
                'description': 'Large dataset processing',
                'status': 'available',
                'capacity': '500GB/hour',
                'utilization': 0
            }
        ]
        self.send_json_response(services)

    def serve_dashboard(self):
        """Serve the dashboard HTML from the working directory."""
        try:
            # 'with' closes the handle even if the read fails (the previous
            # revision leaked it); explicit encoding avoids locale surprises.
            with open('index.html', 'r', encoding='utf-8') as f:
                content = f.read().encode()
            self.send_response(200)
            self.send_header('Content-Type', 'text/html')
            self.send_header('Content-Length', str(len(content)))
            self.end_headers()
            self.wfile.write(content)
        except FileNotFoundError:
            self.send_error(404, 'Dashboard not found')
def run_server(port=8080):
    """Start the miner dashboard HTTP server on localhost and block forever."""
    httpd = HTTPServer(('localhost', port), MinerDashboardHandler)
    print(f"""
╔═══════════════════════════════════════╗
║ AITBC Miner Dashboard Server ║
╠═══════════════════════════════════════╣
║ Dashboard running at: ║
║ http://localhost:{port}
║ ║
║ GPU Monitoring Active! ║
║ Real-time Mining Status ║
╚═══════════════════════════════════════╝
""")
    httpd.serve_forever()


if __name__ == "__main__":
    run_server()

View File

@@ -1,71 +0,0 @@
#!/bin/bash
# Deploys the AITBC miner dashboard and mining service: copies the app to
# /opt, builds a venv, installs both systemd units, and starts them.
# Must run as root; the source path is hard-coded for this host.
echo "=== AITBC Miner Dashboard & Service Deployment ==="
echo ""
# Check if running as root
if [ "$EUID" -ne 0 ]; then
echo "Please run as root (use sudo)"
exit 1
fi
# Create directories
echo "Creating directories..."
mkdir -p /opt/aitbc-miner-dashboard
mkdir -p /var/log/aitbc-miner
# Copy files
# NOTE(review): the glob skips dotfiles in the source directory — confirm
# none are needed at runtime.
echo "Copying files..."
cp -r /home/oib/windsurf/aitbc/apps/miner-dashboard/* /opt/aitbc-miner-dashboard/
# Set permissions
chown -R root:root /opt/aitbc-miner-dashboard
chmod +x /opt/aitbc-miner-dashboard/*.py
chmod +x /opt/aitbc-miner-dashboard/*.sh
# Create virtual environment
echo "Setting up Python environment..."
cd /opt/aitbc-miner-dashboard
python3 -m venv .venv
.venv/bin/pip install psutil
# Install systemd services
echo "Installing systemd services..."
cp aitbc-miner-dashboard.service /etc/systemd/system/
cp aitbc-miner.service /etc/systemd/system/
# Reload systemd so it picks up the newly installed unit files
systemctl daemon-reload
# Enable and start services
echo "Starting services..."
systemctl enable aitbc-miner
systemctl enable aitbc-miner-dashboard
systemctl start aitbc-miner
systemctl start aitbc-miner-dashboard
# Wait for services to start before sampling their status
sleep 5
# Check status
echo ""
echo "=== Service Status ==="
systemctl status aitbc-miner --no-pager -l | head -5
systemctl status aitbc-miner-dashboard --no-pager -l | head -5
# Get IP address (first address reported by hostname -I)
IP=$(hostname -I | awk '{print $1}')
echo ""
echo "✅ Deployment complete!"
echo ""
echo "Services:"
echo " - Miner Service: Running (background)"
echo " - Dashboard: http://localhost:8080"
echo ""
echo "Access from other machines:"
echo " http://$IP:8080"
echo ""
echo "To view logs:"
echo " sudo journalctl -u aitbc-miner -f"
echo " sudo journalctl -u aitbc-miner-dashboard -f"

View File

@@ -1,356 +0,0 @@
#!/bin/bash
echo "========================================"
echo " AITBC GPU Miner Dashboard Setup"
echo " Running on HOST (at1/localhost)"
echo "========================================"
echo ""
# Check if we have GPU access
if ! command -v nvidia-smi &> /dev/null; then
echo "❌ ERROR: nvidia-smi not found!"
echo "Please ensure NVIDIA drivers are installed on the host."
exit 1
fi
echo "✅ GPU detected: $(nvidia-smi --query-gpu=name --format=csv,noheader)"
echo ""
# Create dashboard directory
mkdir -p ~/miner-dashboard
cd ~/miner-dashboard
echo "Creating dashboard files..."
# Create the main dashboard HTML
cat > index.html << 'HTML'
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>AITBC GPU Miner Dashboard - Host</title>
<script src="https://cdn.tailwindcss.com"></script>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
<style>
@keyframes pulse-green {
0%, 100% { box-shadow: 0 0 0 0 rgba(34, 197, 94, 0.7); }
50% { box-shadow: 0 0 0 10px rgba(34, 197, 94, 0); }
}
.gpu-gradient { background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); }
.status-active { animation: pulse-green 2s infinite; }
</style>
</head>
<body class="bg-gray-900 text-white min-h-screen">
<!-- Header -->
<header class="bg-gray-800 shadow-xl">
<div class="container mx-auto px-6 py-4">
<div class="flex items-center justify-between">
<div class="flex items-center space-x-4">
<i class="fas fa-microchip text-4xl text-purple-500"></i>
<div>
<h1 class="text-3xl font-bold">AITBC GPU Miner Dashboard</h1>
<p class="text-green-400">✓ Running on HOST with direct GPU access</p>
</div>
</div>
<div class="flex items-center space-x-4">
<span class="flex items-center bg-green-900/50 px-3 py-1 rounded-full">
<span class="w-3 h-3 bg-green-500 rounded-full status-active mr-2"></span>
<span>GPU Online</span>
</span>
<button onclick="location.reload()" class="bg-purple-600 hover:bg-purple-700 px-4 py-2 rounded-lg transition">
<i class="fas fa-sync-alt mr-2"></i>Refresh
</button>
</div>
</div>
</div>
</header>
<!-- Main Content -->
<main class="container mx-auto px-6 py-8">
<!-- GPU Status Card -->
<div class="gpu-gradient rounded-xl p-8 mb-8 text-white shadow-2xl">
<div class="flex items-center justify-between mb-6">
<div>
<h2 class="text-3xl font-bold mb-2" id="gpuName">NVIDIA GeForce RTX 4060 Ti</h2>
<p class="text-purple-200">Real-time GPU Performance Monitor</p>
</div>
<div class="text-right">
<div class="text-5xl font-bold" id="gpuUtil">0%</div>
<div class="text-purple-200">GPU Utilization</div>
</div>
</div>
<div class="grid grid-cols-1 md:grid-cols-4 gap-4">
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
<div class="flex items-center justify-between">
<div>
<p class="text-purple-200 text-sm">Temperature</p>
<p class="text-2xl font-bold" id="gpuTemp">--°C</p>
</div>
<i class="fas fa-thermometer-half text-3xl text-orange-400"></i>
</div>
</div>
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
<div class="flex items-center justify-between">
<div>
<p class="text-purple-200 text-sm">Power Usage</p>
<p class="text-2xl font-bold" id="gpuPower">--W</p>
</div>
<i class="fas fa-bolt text-3xl text-yellow-400"></i>
</div>
</div>
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
<div class="flex items-center justify-between">
<div>
<p class="text-purple-200 text-sm">Memory Used</p>
<p class="text-2xl font-bold" id="gpuMem">--GB</p>
</div>
<i class="fas fa-memory text-3xl text-blue-400"></i>
</div>
</div>
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
<div class="flex items-center justify-between">
<div>
<p class="text-purple-200 text-sm">Performance</p>
<p class="text-2xl font-bold" id="gpuPerf">P8</p>
</div>
<i class="fas fa-tachometer-alt text-3xl text-green-400"></i>
</div>
</div>
</div>
</div>
<!-- Mining Operations -->
<div class="grid grid-cols-1 lg:grid-cols-2 gap-8 mb-8">
<!-- Active Jobs -->
<div class="bg-gray-800 rounded-xl p-6">
<h3 class="text-xl font-bold mb-4 flex items-center">
<i class="fas fa-tasks mr-3 text-green-500"></i>
Mining Operations
<span id="jobCount" class="ml-auto text-sm text-gray-400">0 active jobs</span>
</h3>
<div id="jobList" class="space-y-3">
<div class="text-center py-8">
<i class="fas fa-pause-circle text-6xl text-yellow-500 mb-4"></i>
<p class="text-xl font-semibold text-yellow-500">Miner Idle</p>
<p class="text-gray-400 mt-2">Ready to accept mining jobs</p>
</div>
</div>
</div>
<!-- GPU Services -->
<div class="bg-gray-800 rounded-xl p-6">
<h3 class="text-xl font-bold mb-4 flex items-center">
<i class="fas fa-server mr-3 text-blue-500"></i>
GPU Services Status
</h3>
<div class="space-y-3">
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center hover:bg-gray-600 transition">
<div class="flex items-center">
<i class="fas fa-cube text-purple-400 mr-3"></i>
<div>
<p class="font-semibold">CUDA Computing</p>
<p class="text-sm text-gray-400">4352 CUDA cores available</p>
</div>
</div>
<span class="bg-green-600 px-3 py-1 rounded-full text-sm">Active</span>
</div>
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center hover:bg-gray-600 transition">
<div class="flex items-center">
<i class="fas fa-project-diagram text-blue-400 mr-3"></i>
<div>
<p class="font-semibold">Parallel Processing</p>
<p class="text-sm text-gray-400">Multi-threaded operations</p>
</div>
</div>
<span class="bg-green-600 px-3 py-1 rounded-full text-sm">Active</span>
</div>
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center hover:bg-gray-600 transition">
<div class="flex items-center">
<i class="fas fa-hashtag text-green-400 mr-3"></i>
<div>
<p class="font-semibold">Hash Generation</p>
<p class="text-sm text-gray-400">Proof-of-work computation</p>
</div>
</div>
<span class="bg-yellow-600 px-3 py-1 rounded-full text-sm">Standby</span>
</div>
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center hover:bg-gray-600 transition">
<div class="flex items-center">
<i class="fas fa-brain text-pink-400 mr-3"></i>
<div>
<p class="font-semibold">AI Model Training</p>
<p class="text-sm text-gray-400">Machine learning operations</p>
</div>
</div>
<span class="bg-gray-600 px-3 py-1 rounded-full text-sm">Available</span>
</div>
</div>
</div>
</div>
<!-- Performance Charts -->
<div class="grid grid-cols-1 lg:grid-cols-2 gap-8 mb-8">
<div class="bg-gray-800 rounded-xl p-6">
<h3 class="text-xl font-bold mb-4">GPU Utilization (Last Hour)</h3>
<canvas id="utilChart" width="400" height="200"></canvas>
</div>
<div class="bg-gray-800 rounded-xl p-6">
<h3 class="text-xl font-bold mb-4">Hash Rate Performance</h3>
<canvas id="hashChart" width="400" height="200"></canvas>
</div>
</div>
<!-- System Info -->
<div class="bg-gray-800 rounded-xl p-6">
<h3 class="text-xl font-bold mb-4">System Information</h3>
<div class="grid grid-cols-1 md:grid-cols-3 gap-6">
<div class="bg-gray-700 rounded-lg p-4 text-center">
<i class="fas fa-desktop text-3xl text-blue-400 mb-2"></i>
<p class="text-sm text-gray-400">Host System</p>
<p class="font-semibold text-green-400" id="hostname">Loading...</p>
</div>
<div class="bg-gray-700 rounded-lg p-4 text-center">
<i class="fas fa-microchip text-3xl text-purple-400 mb-2"></i>
<p class="text-sm text-gray-400">GPU Access</p>
<p class="font-semibold text-green-400">Direct</p>
</div>
<div class="bg-gray-700 rounded-lg p-4 text-center">
<i class="fas fa-cube text-3xl text-red-400 mb-2"></i>
<p class="text-sm text-gray-400">Container</p>
<p class="font-semibold text-red-400">Not Used</p>
</div>
</div>
</div>
</main>
<script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
<script>
// Initialize data
let utilData = Array(12).fill(0);
let hashData = Array(12).fill(0);
let utilChart, hashChart;
// Initialize charts
function initCharts() {
// Utilization chart
const utilCtx = document.getElementById('utilChart').getContext('2d');
utilChart = new Chart(utilCtx, {
type: 'line',
data: {
labels: Array.from({length: 12}, (_, i) => `${60-i*5}m`),
datasets: [{
label: 'GPU Utilization %',
data: utilData,
borderColor: 'rgb(147, 51, 234)',
backgroundColor: 'rgba(147, 51, 234, 0.1)',
tension: 0.4
}]
},
options: {
responsive: true,
plugins: { legend: { display: false } },
scales: {
y: { beginAtZero: true, max: 100, ticks: { color: '#9CA3AF' }, grid: { color: '#374151' } },
x: { ticks: { color: '#9CA3AF' }, grid: { color: '#374151' } }
}
}
});
// Hash rate chart
const hashCtx = document.getElementById('hashChart').getContext('2d');
hashChart = new Chart(hashCtx, {
type: 'line',
data: {
labels: Array.from({length: 12}, (_, i) => `${60-i*5}m`),
datasets: [{
label: 'Hash Rate (MH/s)',
data: hashData,
borderColor: 'rgb(34, 197, 94)',
backgroundColor: 'rgba(34, 197, 94, 0.1)',
tension: 0.4
}]
},
options: {
responsive: true,
plugins: { legend: { display: false } },
scales: {
y: { beginAtZero: true, ticks: { color: '#9CA3AF' }, grid: { color: '#374151' } },
x: { ticks: { color: '#9CA3AF' }, grid: { color: '#374151' } }
}
}
});
}
// Update GPU metrics
function updateGPU() {
// Simulate GPU metrics (in real implementation, fetch from API)
const util = Math.random() * 15; // Idle utilization 0-15%
const temp = 43 + Math.random() * 10;
const power = 18 + util * 0.5;
const mem = 2.9 + Math.random() * 0.5;
const hash = util * 2.5; // Simulated hash rate
// Update display
document.getElementById('gpuUtil').textContent = Math.round(util) + '%';
document.getElementById('gpuTemp').textContent = Math.round(temp) + '°C';
document.getElementById('gpuPower').textContent = Math.round(power) + 'W';
document.getElementById('gpuMem').textContent = mem.toFixed(1) + 'GB';
// Update charts
utilData.shift();
utilData.push(util);
utilChart.update('none');
hashData.shift();
hashData.push(hash);
hashChart.update('none');
}
// Load system info
function loadSystemInfo() {
document.getElementById('hostname').textContent = window.location.hostname;
}
// Initialize
document.addEventListener('DOMContentLoaded', () => {
initCharts();
loadSystemInfo();
updateGPU();
setInterval(updateGPU, 5000);
});
</script>
</body>
</html>
HTML
# Create startup script
cat > start-dashboard.sh << 'EOF'
#!/bin/bash
cd ~/miner-dashboard
echo ""
echo "========================================"
echo " Starting AITBC GPU Miner Dashboard"
echo "========================================"
echo ""
echo "Dashboard will be available at:"
echo " Local: http://localhost:8080"
echo " Network: http://$(hostname -I | awk '{print $1}'):8080"
echo ""
echo "Press Ctrl+C to stop the dashboard"
echo ""
python3 -m http.server 8080 --bind 0.0.0.0
EOF
chmod +x start-dashboard.sh
echo ""
echo "✅ Dashboard setup complete!"
echo ""
echo "To start the dashboard, run:"
echo " ~/miner-dashboard/start-dashboard.sh"
echo ""
echo "Dashboard location: ~/miner-dashboard/"
echo ""
echo "========================================"

View File

@@ -1,313 +0,0 @@
#!/bin/bash
echo "=== AITBC Miner Dashboard - Host Deployment ==="
echo ""
# Check if running on host with GPU
if ! command -v nvidia-smi &> /dev/null; then
echo "❌ nvidia-smi not found. Please install NVIDIA drivers."
exit 1
fi
# Create directory
mkdir -p ~/miner-dashboard
cd ~/miner-dashboard
echo "✅ GPU detected: $(nvidia-smi --query-gpu=name --format=csv,noheader)"
# Create dashboard HTML
cat > index.html << 'EOF'
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>AITBC GPU Miner Dashboard</title>
<script src="https://cdn.tailwindcss.com"></script>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
<style>
@keyframes pulse-green {
0%, 100% { box-shadow: 0 0 0 0 rgba(34, 197, 94, 0.7); }
50% { box-shadow: 0 0 0 10px rgba(34, 197, 94, 0); }
}
.status-online { animation: pulse-green 2s infinite; }
.gpu-card { background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); }
</style>
</head>
<body class="bg-gray-900 text-white min-h-screen">
<header class="bg-gray-800 shadow-lg">
<div class="container mx-auto px-6 py-4">
<div class="flex items-center justify-between">
<div class="flex items-center space-x-4">
<i class="fas fa-microchip text-3xl text-purple-500"></i>
<div>
<h1 class="text-2xl font-bold">AITBC Miner Dashboard</h1>
<p class="text-sm text-gray-400">Host GPU Mining Operations</p>
</div>
</div>
<div class="flex items-center space-x-4">
<span class="flex items-center">
<span class="w-3 h-3 bg-green-500 rounded-full status-online mr-2"></span>
<span class="text-sm">GPU Connected</span>
</span>
<button onclick="refreshData()" class="bg-purple-600 hover:bg-purple-700 px-4 py-2 rounded-lg transition">
<i class="fas fa-sync-alt mr-2"></i>Refresh
</button>
</div>
</div>
</div>
</header>
<main class="container mx-auto px-6 py-8">
<!-- GPU Status -->
<div class="gpu-card rounded-xl p-6 mb-8 text-white">
<div class="flex items-center justify-between mb-6">
<div>
<h2 class="text-3xl font-bold mb-2" id="gpuName">Loading...</h2>
<p class="text-purple-200">Real-time GPU Status</p>
</div>
<div class="text-right">
<div class="text-4xl font-bold" id="gpuUtil">0%</div>
<div class="text-purple-200">GPU Utilization</div>
</div>
</div>
<div class="grid grid-cols-1 md:grid-cols-4 gap-4">
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
<div class="flex items-center justify-between">
<div>
<p class="text-purple-200 text-sm">Temperature</p>
<p class="text-2xl font-bold" id="gpuTemp">--°C</p>
</div>
<i class="fas fa-thermometer-half text-3xl text-purple-300"></i>
</div>
</div>
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
<div class="flex items-center justify-between">
<div>
<p class="text-purple-200 text-sm">Power Usage</p>
<p class="text-2xl font-bold" id="gpuPower">--W</p>
</div>
<i class="fas fa-bolt text-3xl text-yellow-400"></i>
</div>
</div>
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
<div class="flex items-center justify-between">
<div>
<p class="text-purple-200 text-sm">Memory Used</p>
<p class="text-2xl font-bold" id="gpuMem">--GB</p>
</div>
<i class="fas fa-memory text-3xl text-blue-400"></i>
</div>
</div>
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
<div class="flex items-center justify-between">
<div>
<p class="text-purple-200 text-sm">Performance</p>
<p class="text-2xl font-bold" id="gpuPerf">--</p>
</div>
<i class="fas fa-tachometer-alt text-3xl text-green-400"></i>
</div>
</div>
</div>
</div>
<!-- Mining Status -->
<div class="grid grid-cols-1 lg:grid-cols-2 gap-8 mb-8">
<!-- Active Jobs -->
<div class="bg-gray-800 rounded-xl p-6">
<h3 class="text-xl font-bold mb-4 flex items-center">
<i class="fas fa-tasks mr-3 text-green-500"></i>
Mining Status
</h3>
<div class="text-center py-8">
<i class="fas fa-pause-circle text-6xl text-yellow-500 mb-4"></i>
<p class="text-xl font-semibold text-yellow-500">Miner Idle</p>
<p class="text-gray-400 mt-2">Ready to accept mining jobs</p>
<button onclick="startMiner()" class="mt-4 bg-green-600 hover:bg-green-700 px-6 py-2 rounded-lg transition">
<i class="fas fa-play mr-2"></i>Start Mining
</button>
</div>
</div>
<!-- Services -->
<div class="bg-gray-800 rounded-xl p-6">
<h3 class="text-xl font-bold mb-4 flex items-center">
<i class="fas fa-server mr-3 text-blue-500"></i>
GPU Services Available
</h3>
<div class="space-y-3">
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center">
<div>
<p class="font-semibold">GPU Computing</p>
<p class="text-sm text-gray-400">CUDA cores ready</p>
</div>
<span class="bg-green-600 px-3 py-1 rounded-full text-sm">Available</span>
</div>
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center">
<div>
<p class="font-semibold">Hash Generation</p>
<p class="text-sm text-gray-400">Proof-of-work capable</p>
</div>
<span class="bg-green-600 px-3 py-1 rounded-full text-sm">Available</span>
</div>
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center">
<div>
<p class="font-semibold">AI Model Training</p>
<p class="text-sm text-gray-400">ML operations ready</p>
</div>
<span class="bg-green-600 px-3 py-1 rounded-full text-sm">Available</span>
</div>
</div>
</div>
</div>
<!-- Info -->
<div class="bg-gray-800 rounded-xl p-6">
<h3 class="text-xl font-bold mb-4">System Information</h3>
<div class="grid grid-cols-1 md:grid-cols-3 gap-6">
<div>
<p class="text-sm text-gray-400">Host System</p>
<p class="font-semibold" id="hostname">Loading...</p>
</div>
<div>
<p class="text-sm text-gray-400">GPU Driver</p>
<p class="font-semibold" id="driver">Loading...</p>
</div>
<div>
<p class="text-sm text-gray-400">CUDA Version</p>
<p class="font-semibold" id="cuda">Loading...</p>
</div>
</div>
</div>
</main>
<script>
// Load GPU info
async function loadGPUInfo() {
try {
const response = await fetch('/api/gpu');
const data = await response.json();
document.getElementById('gpuName').textContent = data.name;
document.getElementById('gpuUtil').textContent = data.utilization + '%';
document.getElementById('gpuTemp').textContent = data.temperature + '°C';
document.getElementById('gpuPower').textContent = data.power + 'W';
document.getElementById('gpuMem').textContent = data.memory_used + 'GB / ' + data.memory_total + 'GB';
document.getElementById('gpuPerf').textContent = data.performance_state;
document.getElementById('hostname').textContent = data.hostname;
document.getElementById('driver').textContent = data.driver_version;
document.getElementById('cuda').textContent = data.cuda_version;
} catch (e) {
console.error('Failed to load GPU info:', e);
}
}
// Refresh data
function refreshData() {
const btn = document.querySelector('button[onclick="refreshData()"]');
btn.innerHTML = '<i class="fas fa-spinner fa-spin mr-2"></i>Refreshing...';
loadGPUInfo().then(() => {
btn.innerHTML = '<i class="fas fa-sync-alt mr-2"></i>Refresh';
});
}
// Start miner (placeholder)
function startMiner() {
alert('Miner service would start here. This is a demo dashboard.');
}
// Initialize
loadGPUInfo();
setInterval(loadGPUInfo, 5000);
</script>
</body>
</html>
EOF
# Create Python server with API
cat > server.py << 'EOF'
import json
import subprocess
import socket
from http.server import HTTPServer, BaseHTTPRequestHandler
from urllib.parse import urlparse
class MinerHandler(BaseHTTPRequestHandler):
def do_GET(self):
if self.path == '/api/gpu':
self.send_json(self.get_gpu_info())
elif self.path == '/' or self.path == '/index.html':
self.serve_file('index.html')
else:
self.send_error(404)
def get_gpu_info(self):
try:
# Get GPU info
result = subprocess.run(['nvidia-smi', '--query-gpu=name,utilization.gpu,temperature.gpu,power.draw,memory.used,memory.total,driver_version,cuda_version', '--format=csv,noheader,nounits'],
capture_output=True, text=True)
if result.returncode == 0:
values = result.stdout.strip().split(', ')
return {
'name': values[0],
'utilization': int(values[1]),
'temperature': int(values[2]),
'power': float(values[3]),
'memory_used': float(values[4]) / 1024,
'memory_total': float(values[5]) / 1024,
'driver_version': values[6],
'cuda_version': values[7],
'hostname': socket.gethostname(),
'performance_state': 'P8' # Would need additional query
}
except Exception as e:
return {'error': str(e)}
def send_json(self, data):
self.send_response(200)
self.send_header('Content-Type', 'application/json')
self.end_headers()
self.wfile.write(json.dumps(data).encode())
def serve_file(self, filename):
try:
with open(filename, 'r') as f:
self.send_response(200)
self.send_header('Content-Type', 'text/html')
self.end_headers()
self.wfile.write(f.read().encode())
except FileNotFoundError:
self.send_error(404)
if __name__ == '__main__':
server = HTTPServer(('0.0.0.0', 8080), MinerHandler)
print('''
╔═══════════════════════════════════════╗
║ AITBC Miner Dashboard ║
║ Running on HOST with GPU access ║
╠═══════════════════════════════════════╣
║ Dashboard: http://localhost:8080 ║
║ Host: $(hostname) ║
║ GPU: $(nvidia-smi --query-gpu=name --format=csv,noheader) ║
╚═══════════════════════════════════════╝
''')
server.serve_forever()
EOF
# Make server executable
chmod +x server.py
echo ""
echo "✅ Dashboard created!"
echo ""
echo "To start the dashboard:"
echo " cd ~/miner-dashboard"
echo " python3 server.py"
echo ""
echo "Then access at: http://localhost:8080"
echo ""
echo "To auto-start on boot, add to crontab:"
echo " @reboot cd ~/miner-dashboard && python3 server.py &"

View File

@@ -1,189 +0,0 @@
#!/bin/bash
echo "=== AITBC Miner Dashboard - Host Setup ==="
echo ""
echo "This script sets up the dashboard on the HOST machine (at1)"
echo "NOT in the container (aitbc)"
echo ""
# Check if we have GPU access
if ! command -v nvidia-smi &> /dev/null; then
echo "❌ ERROR: nvidia-smi not found!"
echo "This script must be run on the HOST with GPU access"
exit 1
fi
echo "✅ GPU detected: $(nvidia-smi --query-gpu=name --format=csv,noheader)"
# Create dashboard directory
mkdir -p ~/miner-dashboard
cd ~/miner-dashboard
# Create HTML dashboard
cat > index.html << 'HTML'
<!DOCTYPE html>
<html>
<head>
<title>AITBC GPU Miner Dashboard - HOST</title>
<script src="https://cdn.tailwindcss.com"></script>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
</head>
<body class="bg-gray-900 text-white min-h-screen">
<div class="container mx-auto px-6 py-8">
<header class="mb-8">
<div class="flex items-center justify-between">
<div class="flex items-center space-x-4">
<i class="fas fa-microchip text-4xl text-purple-500"></i>
<div>
<h1 class="text-3xl font-bold">AITBC GPU Miner Dashboard</h1>
<p class="text-gray-400">Running on HOST with direct GPU access</p>
</div>
</div>
<div class="flex items-center space-x-2">
<span class="w-3 h-3 bg-green-500 rounded-full animate-pulse"></span>
<span class="text-green-500">GPU Connected</span>
</div>
</div>
</header>
<div class="bg-gradient-to-r from-purple-600 to-blue-600 rounded-xl p-8 mb-8 text-white">
<h2 class="text-2xl font-bold mb-6">GPU Status Monitor</h2>
<div class="grid grid-cols-2 md:grid-cols-4 gap-6">
<div class="bg-white/10 backdrop-blur rounded-lg p-4 text-center">
<i class="fas fa-chart-line text-3xl mb-2"></i>
<p class="text-sm opacity-80">Utilization</p>
<p class="text-3xl font-bold" id="utilization">0%</p>
</div>
<div class="bg-white/10 backdrop-blur rounded-lg p-4 text-center">
<i class="fas fa-thermometer-half text-3xl mb-2"></i>
<p class="text-sm opacity-80">Temperature</p>
<p class="text-3xl font-bold" id="temperature">--°C</p>
</div>
<div class="bg-white/10 backdrop-blur rounded-lg p-4 text-center">
<i class="fas fa-bolt text-3xl mb-2"></i>
<p class="text-sm opacity-80">Power</p>
<p class="text-3xl font-bold" id="power">--W</p>
</div>
<div class="bg-white/10 backdrop-blur rounded-lg p-4 text-center">
<i class="fas fa-memory text-3xl mb-2"></i>
<p class="text-sm opacity-80">Memory</p>
<p class="text-3xl font-bold" id="memory">--GB</p>
</div>
</div>
</div>
<div class="grid grid-cols-1 md:grid-cols-2 gap-8">
<div class="bg-gray-800 rounded-xl p-6">
<h3 class="text-xl font-bold mb-4 flex items-center">
<i class="fas fa-cog text-green-500 mr-2"></i>
Mining Operations
</h3>
<div class="space-y-4">
<div class="bg-gray-700 rounded-lg p-4">
<div class="flex justify-between items-center mb-2">
<span class="font-semibold">Status</span>
<span class="bg-yellow-600 px-3 py-1 rounded-full text-sm">Idle</span>
</div>
<p class="text-sm text-gray-400">Miner is ready to accept jobs</p>
</div>
<div class="bg-gray-700 rounded-lg p-4">
<div class="flex justify-between items-center mb-2">
<span class="font-semibold">Hash Rate</span>
<span class="text-green-400">0 MH/s</span>
</div>
<div class="w-full bg-gray-600 rounded-full h-2">
<div class="bg-green-500 h-2 rounded-full" style="width: 0%"></div>
</div>
</div>
</div>
</div>
<div class="bg-gray-800 rounded-xl p-6">
<h3 class="text-xl font-bold mb-4 flex items-center">
<i class="fas fa-server text-blue-500 mr-2"></i>
GPU Services
</h3>
<div class="space-y-3">
<div class="flex justify-between items-center p-3 bg-gray-700 rounded-lg">
<span>CUDA Computing</span>
<span class="bg-green-600 px-2 py-1 rounded text-xs">Active</span>
</div>
<div class="flex justify-between items-center p-3 bg-gray-700 rounded-lg">
<span>Parallel Processing</span>
<span class="bg-green-600 px-2 py-1 rounded text-xs">Active</span>
</div>
<div class="flex justify-between items-center p-3 bg-gray-700 rounded-lg">
<span>Hash Generation</span>
<span class="bg-yellow-600 px-2 py-1 rounded text-xs">Standby</span>
</div>
<div class="flex justify-between items-center p-3 bg-gray-700 rounded-lg">
<span>AI Model Training</span>
<span class="bg-gray-600 px-2 py-1 rounded text-xs">Available</span>
</div>
</div>
</div>
</div>
<div class="mt-8 bg-gray-800 rounded-xl p-6">
<h3 class="text-xl font-bold mb-4">System Information</h3>
<div class="grid grid-cols-3 gap-6 text-center">
<div>
<p class="text-sm text-gray-400">Location</p>
<p class="font-semibold text-green-400">HOST System</p>
</div>
<div>
<p class="text-sm text-gray-400">GPU Access</p>
<p class="font-semibold text-green-400">Direct</p>
</div>
<div>
<p class="text-sm text-gray-400">Container</p>
<p class="font-semibold text-red-400">Not Used</p>
</div>
</div>
</div>
</div>
<script>
// Simulate real-time GPU data
function updateGPU() {
// In real implementation, this would fetch from an API
const util = Math.random() * 20; // 0-20% idle usage
const temp = 43 + Math.random() * 10;
const power = 18 + util * 0.5;
const mem = 2.9 + Math.random() * 0.5;
document.getElementById('utilization').textContent = Math.round(util) + '%';
document.getElementById('temperature').textContent = Math.round(temp) + '°C';
document.getElementById('power').textContent = Math.round(power) + 'W';
document.getElementById('memory').textContent = mem.toFixed(1) + 'GB';
}
// Update every 2 seconds
setInterval(updateGPU, 2000);
updateGPU();
</script>
</body>
</html>
HTML
# Create simple server
cat > serve.sh << 'EOF'
#!/bin/bash
cd ~/miner-dashboard
echo "Starting GPU Miner Dashboard on HOST..."
echo "Access at: http://localhost:8080"
echo "Press Ctrl+C to stop"
python3 -m http.server 8080 --bind 0.0.0.0
EOF
chmod +x serve.sh
echo ""
echo "✅ Dashboard created on HOST!"
echo ""
echo "To run the dashboard:"
echo " ~/miner-dashboard/serve.sh"
echo ""
echo "Dashboard will be available at:"
echo " - Local: http://localhost:8080"
echo " - Network: http://$(hostname -I | awk '{print $1}'):8080"

View File

@@ -1,449 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>AITBC Miner Dashboard</title>
<script src="https://cdn.tailwindcss.com"></script>
<script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
<style>
@keyframes pulse-green {
0%, 100% { box-shadow: 0 0 0 0 rgba(34, 197, 94, 0.7); }
50% { box-shadow: 0 0 0 10px rgba(34, 197, 94, 0); }
}
.status-online { animation: pulse-green 2s infinite; }
.gpu-card {
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
}
.metric-card {
background: rgba(255, 255, 255, 0.1);
backdrop-filter: blur(10px);
border: 1px solid rgba(255, 255, 255, 0.2);
}
</style>
</head>
<body class="bg-gray-900 text-white min-h-screen">
<!-- Header -->
<header class="bg-gray-800 shadow-lg">
<div class="container mx-auto px-6 py-4">
<div class="flex items-center justify-between">
<div class="flex items-center space-x-4">
<i class="fas fa-microchip text-3xl text-purple-500"></i>
<div>
<h1 class="text-2xl font-bold">AITBC Miner Dashboard</h1>
<p class="text-sm text-gray-400">GPU Mining Operations Monitor</p>
</div>
</div>
<div class="flex items-center space-x-4">
<span id="connectionStatus" class="flex items-center">
<span class="w-3 h-3 bg-green-500 rounded-full status-online mr-2"></span>
<span class="text-sm">Connected</span>
</span>
<button onclick="refreshData()" class="bg-purple-600 hover:bg-purple-700 px-4 py-2 rounded-lg transition">
<i class="fas fa-sync-alt mr-2"></i>Refresh
</button>
</div>
</div>
</div>
</header>
<!-- Main Content -->
<main class="container mx-auto px-6 py-8">
<!-- GPU Status Card -->
<div class="gpu-card rounded-xl p-6 mb-8 text-white">
<div class="flex items-center justify-between mb-6">
<div>
<h2 class="text-3xl font-bold mb-2">NVIDIA GeForce RTX 4060 Ti</h2>
<p class="text-purple-200">GPU Status & Performance</p>
</div>
<div class="text-right">
<div class="text-4xl font-bold" id="gpuUtilization">0%</div>
<div class="text-purple-200">GPU Utilization</div>
</div>
</div>
<div class="grid grid-cols-1 md:grid-cols-4 gap-4">
<div class="metric-card rounded-lg p-4">
<div class="flex items-center justify-between">
<div>
<p class="text-purple-200 text-sm">Temperature</p>
<p class="text-2xl font-bold" id="gpuTemp">43°C</p>
</div>
<i class="fas fa-thermometer-half text-3xl text-purple-300"></i>
</div>
</div>
<div class="metric-card rounded-lg p-4">
<div class="flex items-center justify-between">
<div>
<p class="text-purple-200 text-sm">Power Usage</p>
<p class="text-2xl font-bold" id="powerUsage">18W</p>
</div>
<i class="fas fa-bolt text-3xl text-yellow-400"></i>
</div>
</div>
<div class="metric-card rounded-lg p-4">
<div class="flex items-center justify-between">
<div>
<p class="text-purple-200 text-sm">Memory Used</p>
<p class="text-2xl font-bold" id="memoryUsage">2.9GB</p>
</div>
<i class="fas fa-memory text-3xl text-blue-400"></i>
</div>
</div>
<div class="metric-card rounded-lg p-4">
<div class="flex items-center justify-between">
<div>
<p class="text-purple-200 text-sm">Performance</p>
<p class="text-2xl font-bold" id="perfState">P8</p>
</div>
<i class="fas fa-tachometer-alt text-3xl text-green-400"></i>
</div>
</div>
</div>
</div>
<!-- Mining Services -->
<div class="grid grid-cols-1 lg:grid-cols-2 gap-8 mb-8">
<!-- Active Mining Jobs -->
<div class="bg-gray-800 rounded-xl p-6">
<h3 class="text-xl font-bold mb-4 flex items-center">
<i class="fas fa-tasks mr-3 text-green-500"></i>
Active Mining Jobs
</h3>
<div id="miningJobs" class="space-y-3">
<div class="bg-gray-700 rounded-lg p-4">
<div class="flex justify-between items-center">
<div>
<p class="font-semibold">Matrix Computation</p>
<p class="text-sm text-gray-400">Job ID: #12345</p>
</div>
<div class="text-right">
<p class="text-green-400 font-semibold">85%</p>
<p class="text-xs text-gray-400">Complete</p>
</div>
</div>
<div class="mt-3 bg-gray-600 rounded-full h-2">
<div class="bg-green-500 h-2 rounded-full" style="width: 85%"></div>
</div>
</div>
<div class="bg-gray-700 rounded-lg p-4">
<div class="flex justify-between items-center">
<div>
<p class="font-semibold">Hash Validation</p>
<p class="text-sm text-gray-400">Job ID: #12346</p>
</div>
<div class="text-right">
<p class="text-yellow-400 font-semibold">42%</p>
<p class="text-xs text-gray-400">Complete</p>
</div>
</div>
<div class="mt-3 bg-gray-600 rounded-full h-2">
<div class="bg-yellow-500 h-2 rounded-full" style="width: 42%"></div>
</div>
</div>
</div>
</div>
<!-- Mining Services -->
<div class="bg-gray-800 rounded-xl p-6">
<h3 class="text-xl font-bold mb-4 flex items-center">
<i class="fas fa-server mr-3 text-blue-500"></i>
Available Services
</h3>
<div class="space-y-3">
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center">
<div>
<p class="font-semibold">GPU Computing</p>
<p class="text-sm text-gray-400">CUDA cores available for computation</p>
</div>
<span class="bg-green-600 px-3 py-1 rounded-full text-sm">Active</span>
</div>
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center">
<div>
<p class="font-semibold">Parallel Processing</p>
<p class="text-sm text-gray-400">Multi-threaded job execution</p>
</div>
<span class="bg-green-600 px-3 py-1 rounded-full text-sm">Active</span>
</div>
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center">
<div>
<p class="font-semibold">Hash Generation</p>
<p class="text-sm text-gray-400">Proof-of-work computation</p>
</div>
<span class="bg-yellow-600 px-3 py-1 rounded-full text-sm">Standby</span>
</div>
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center">
<div>
<p class="font-semibold">AI Model Training</p>
<p class="text-sm text-gray-400">Machine learning operations</p>
</div>
<span class="bg-gray-600 px-3 py-1 rounded-full text-sm">Available</span>
</div>
</div>
</div>
</div>
<!-- Performance Charts -->
<div class="grid grid-cols-1 lg:grid-cols-2 gap-8">
<!-- GPU Utilization Chart -->
<div class="bg-gray-800 rounded-xl p-6">
<h3 class="text-xl font-bold mb-4">GPU Utilization (Last Hour)</h3>
<canvas id="utilizationChart"></canvas>
</div>
<!-- Hash Rate Chart -->
<div class="bg-gray-800 rounded-xl p-6">
<h3 class="text-xl font-bold mb-4">Hash Rate Performance</h3>
<canvas id="hashRateChart"></canvas>
</div>
</div>
<!-- Statistics -->
<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-4 mb-8">
<div class="bg-gray-800 rounded-lg p-4 text-center">
<p class="text-gray-400 text-sm">Total Jobs Completed</p>
<p class="text-3xl font-bold text-green-500" id="totalJobs">0</p>
</div>
<div class="bg-gray-800 rounded-lg p-4 text-center">
<p class="text-gray-400 text-sm">Average Job Time</p>
<p class="text-3xl font-bold text-blue-500" id="avgJobTime">0s</p>
</div>
<div class="bg-gray-800 rounded-lg p-4 text-center">
<p class="text-gray-400 text-sm">Success Rate</p>
<p class="text-3xl font-bold text-purple-500" id="successRate">0%</p>
</div>
<div class="bg-gray-800 rounded-lg p-4 text-center">
<p class="text-gray-400 text-sm">Hash Rate</p>
<p class="text-3xl font-bold text-yellow-500" id="hashRate">0 MH/s</p>
</div>
</div>
<!-- Service Details -->
<div class="bg-gray-800 rounded-xl p-6">
<h3 class="text-xl font-bold mb-4">Service Capabilities</h3>
<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4" id="serviceDetails">
<!-- Service details will be loaded here -->
</div>
</div>
</main>
<script>
// Chart instances
let utilizationChart, hashRateChart;
// Initialize dashboard
// Bootstrap the dashboard: pull all data once, build the charts, then
// poll the backend for fresh data on a fixed interval.
async function initDashboard() {
    for (const loader of [loadGPUStatus, loadMiningJobs, loadServices, loadStatistics]) {
        await loader();
    }
    initCharts();
    // Poll every 5 seconds.
    setInterval(refreshData, 5000);
}
// Load GPU status
// Fetch current GPU telemetry, render it into the summary tiles, and
// push the newest utilization sample onto the rolling chart.
async function loadGPUStatus() {
    try {
        const res = await fetch('/api/gpu-status');
        const gpu = await res.json();
        const set = (id, text) => { document.getElementById(id).textContent = text; };
        set('gpuUtilization', gpu.utilization + '%');
        set('gpuTemp', gpu.temperature + '°C');
        set('powerUsage', gpu.power_usage + 'W');
        set('memoryUsage', gpu.memory_used.toFixed(1) + 'GB');
        set('perfState', gpu.performance_state);
        if (utilizationChart) {
            // Fixed-length rolling window: drop oldest, append newest.
            const series = utilizationChart.data.datasets[0].data;
            series.shift();
            series.push(gpu.utilization);
            utilizationChart.update('none');
        }
    } catch (error) {
        console.error('Failed to load GPU status:', error);
    }
}
// Load mining jobs
// Fetch the active job list, update the job-count badge, and render
// either an empty state or one progress card per job.
async function loadMiningJobs() {
    try {
        const response = await fetch('/api/mining-jobs');
        const jobs = await response.json();
        const jobsContainer = document.getElementById('miningJobs');
        document.getElementById('jobCount').textContent = jobs.length + ' jobs';
        if (jobs.length === 0) {
            jobsContainer.innerHTML = `
                <div class="text-center text-gray-500 py-8">
                    <i class="fas fa-inbox text-4xl mb-3"></i>
                    <p>No active jobs</p>
                </div>
            `;
        } else {
            // Progress colour buckets: >70% green, >30% yellow, else red.
            // NOTE(review): colour class names are built dynamically; the
            // runtime Tailwind CDN handles this, but a static Tailwind build
            // would purge them — confirm before switching builds.
            jobsContainer.innerHTML = jobs.map(job => `
                <div class="bg-gray-700 rounded-lg p-4">
                    <div class="flex justify-between items-center">
                        <div>
                            <p class="font-semibold">${job.name}</p>
                            <p class="text-sm text-gray-400">Job ID: #${job.id}</p>
                        </div>
                        <div class="text-right">
                            <p class="text-${job.progress > 70 ? 'green' : job.progress > 30 ? 'yellow' : 'red'}-400 font-semibold">${job.progress}%</p>
                            <p class="text-xs text-gray-400">${job.status}</p>
                        </div>
                    </div>
                    <div class="mt-3 bg-gray-600 rounded-full h-2">
                        <div class="bg-${job.progress > 70 ? 'green' : job.progress > 30 ? 'yellow' : 'red'}-500 h-2 rounded-full transition-all duration-500" style="width: ${job.progress}%"></div>
                    </div>
                </div>
            `).join('');
        }
    } catch (error) {
        console.error('Failed to load mining jobs:', error);
    }
}
// Load services
// Fetch the service list and render it twice: a compact status list
// (#miningServices) and a detailed capability card per service
// (#serviceDetails).
async function loadServices() {
    try {
        const response = await fetch('/api/services');
        const services = await response.json();
        const servicesContainer = document.getElementById('miningServices');
        // Status colour: active = green, standby = yellow, otherwise gray.
        // NOTE(review): dynamic Tailwind class names — fine with the runtime
        // CDN build, purged by a static build; confirm before switching.
        servicesContainer.innerHTML = services.map(service => `
            <div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center">
                <div>
                    <p class="font-semibold">${service.name}</p>
                    <p class="text-sm text-gray-400">${service.description}</p>
                </div>
                <span class="bg-${service.status === 'active' ? 'green' : service.status === 'standby' ? 'yellow' : 'gray'}-600 px-3 py-1 rounded-full text-sm">
                    ${service.status}
                </span>
            </div>
        `).join('');
        // Load service details
        const detailsContainer = document.getElementById('serviceDetails');
        detailsContainer.innerHTML = services.map(service => `
            <div class="bg-gray-700 rounded-lg p-4">
                <h4 class="font-semibold mb-2">${service.name}</h4>
                <p class="text-sm text-gray-400 mb-3">${service.description}</p>
                <div class="space-y-2">
                    <div class="flex justify-between text-sm">
                        <span>Capacity:</span>
                        <span>${service.capacity}</span>
                    </div>
                    <div class="flex justify-between text-sm">
                        <span>Utilization:</span>
                        <span>${service.utilization}%</span>
                    </div>
                    <div class="bg-gray-600 rounded-full h-2 mt-2">
                        <div class="bg-blue-500 h-2 rounded-full" style="width: ${service.utilization}%"></div>
                    </div>
                </div>
            </div>
        `).join('');
    } catch (error) {
        console.error('Failed to load services:', error);
    }
}
// Load statistics
// Fetch aggregate mining statistics, render the four stat tiles, and
// push the newest hash-rate sample onto the rolling chart.
async function loadStatistics() {
    try {
        const response = await fetch('/api/statistics');
        const stats = await response.json();
        const tiles = [
            ['totalJobs', stats.total_jobs_completed.toLocaleString()],
            ['avgJobTime', stats.average_job_time + 's'],
            ['successRate', stats.success_rate + '%'],
            ['hashRate', stats.hash_rate + ' MH/s']
        ];
        for (const [id, text] of tiles) {
            document.getElementById(id).textContent = text;
        }
        if (hashRateChart) {
            // Fixed-length rolling window: drop oldest, append newest.
            const series = hashRateChart.data.datasets[0].data;
            series.shift();
            series.push(stats.hash_rate);
            hashRateChart.update('none');
        }
    } catch (error) {
        console.error('Failed to load statistics:', error);
    }
}
// Initialize charts
// Build both rolling line charts. The two charts were duplicated config
// blocks differing only in label, colour, and y-axis cap, so a single
// factory now builds them.
function initCharts() {
    // canvasId: target <canvas>; label: dataset label; rgb: "r, g, b"
    // channel string; yMax: y-axis cap or null for auto-scale.
    const makeRollingChart = (canvasId, label, rgb, yMax) => {
        const ctx = document.getElementById(canvasId).getContext('2d');
        const yScale = { beginAtZero: true, ticks: { color: '#9CA3AF' }, grid: { color: '#374151' } };
        if (yMax !== null) {
            yScale.max = yMax;
        }
        return new Chart(ctx, {
            type: 'line',
            data: {
                // 12 slots covering the last hour in 5-minute steps.
                labels: Array.from({length: 12}, (_, i) => `${60 - i * 5}m`),
                datasets: [{
                    label: label,
                    data: Array(12).fill(0),
                    borderColor: `rgb(${rgb})`,
                    backgroundColor: `rgba(${rgb}, 0.1)`,
                    tension: 0.4
                }]
            },
            options: {
                responsive: true,
                animation: { duration: 0 },  // instant redraw for rolling data
                plugins: { legend: { display: false } },
                scales: {
                    y: yScale,
                    x: { ticks: { color: '#9CA3AF' }, grid: { color: '#374151' } }
                }
            }
        });
    };
    // Utilization is a percentage, so cap its axis at 100; hash rate auto-scales.
    utilizationChart = makeRollingChart('utilizationChart', 'GPU Utilization %', '147, 51, 234', 100);
    hashRateChart = makeRollingChart('hashRateChart', 'Hash Rate (MH/s)', '34, 197, 94', null);
}
// Refresh all data
// Re-fetch all dashboard data in parallel, showing a spinner on the
// refresh button while the requests are in flight.
// Fixes: querySelector may return null (markup changes would crash the
// auto-refresh timer), and the button label is now restored in a
// finally block even if a loader throws.
async function refreshData() {
    const refreshBtn = document.querySelector('button[onclick="refreshData()"]');
    if (refreshBtn) {
        refreshBtn.innerHTML = '<i class="fas fa-spinner fa-spin mr-2"></i>Refreshing...';
    }
    try {
        await Promise.all([
            loadGPUStatus(),
            loadMiningJobs(),
            loadServices(),
            loadStatistics()
        ]);
    } finally {
        if (refreshBtn) {
            refreshBtn.innerHTML = '<i class="fas fa-sync-alt mr-2"></i>Refresh';
        }
    }
}
// Initialize on load
document.addEventListener('DOMContentLoaded', initDashboard);
</script>
</body>
</html>

View File

@@ -1,181 +0,0 @@
#!/usr/bin/env python3
"""AITBC GPU Mining Service"""
import subprocess
import time
import json
import random
from datetime import datetime
import threading
class AITBCMiner:
    """Simulated GPU mining service.

    A mining loop randomly spawns fake jobs and advances each one in its
    own daemon thread, while a monitor thread samples GPU utilization via
    ``nvidia-smi`` and derives a synthetic hash rate from it. State is
    exposed through :meth:`get_status`.
    """

    # Cap on finished (completed/failed) jobs retained in ``self.jobs`` so a
    # long-running miner does not grow memory without bound.
    MAX_FINISHED_JOBS = 100

    def __init__(self):
        self.running = False    # toggled by start_mining()/stop_mining()
        self.jobs = []          # all known jobs; finished ones are pruned
        self.stats = {
            'total_jobs': 0,
            'completed_jobs': 0,
            'failed_jobs': 0,
            'hash_rate': 0,
            'uptime': 0
        }
        self.start_time = None  # epoch seconds when mining started

    def start_mining(self):
        """Start the mining service (mining + GPU-monitor daemon threads)."""
        self.running = True
        self.start_time = time.time()
        print("🚀 AITBC Miner started")
        # Daemon threads: the process may exit without joining them.
        mining_thread = threading.Thread(target=self._mining_loop)
        mining_thread.daemon = True
        mining_thread.start()
        monitor_thread = threading.Thread(target=self._monitor_gpu)
        monitor_thread.daemon = True
        monitor_thread.start()

    def stop_mining(self):
        """Stop the mining service; background loops exit on their next tick."""
        self.running = False
        print("⛔ AITBC Miner stopped")

    def _mining_loop(self):
        """Main mining loop: ~30% chance per second to spawn a new job."""
        while self.running:
            if random.random() > 0.7:
                job = self._create_job()
                self.jobs.append(job)
                self._process_job(job)
                # Keep memory bounded over long runs.
                self._prune_finished_jobs()
            time.sleep(1)

    def _prune_finished_jobs(self):
        """Drop the oldest finished jobs beyond MAX_FINISHED_JOBS.

        Running jobs are always kept, and the aggregate counters in
        ``self.stats`` are unaffected, so only long-dead history is lost.
        """
        finished = [j for j in self.jobs if j['status'] != 'running']
        excess = len(finished) - self.MAX_FINISHED_JOBS
        if excess > 0:
            doomed = {id(j) for j in finished[:excess]}
            self.jobs = [j for j in self.jobs if id(j) not in doomed]

    def _create_job(self):
        """Create (and count) a new mining job with a random type."""
        job_types = [
            'Matrix Computation',
            'Hash Validation',
            'Block Verification',
            'Transaction Processing',
            'AI Model Training'
        ]
        job = {
            'id': f"job_{int(time.time())}_{random.randint(1000, 9999)}",
            'name': random.choice(job_types),
            'progress': 0,
            'status': 'running',
            'created_at': datetime.now().isoformat()
        }
        self.stats['total_jobs'] += 1
        return job

    def _process_job(self, job):
        """Process a mining job in its own daemon thread."""
        processing_thread = threading.Thread(target=self._process_job_thread, args=(job,))
        processing_thread.daemon = True
        processing_thread.start()

    def _process_job_thread(self, job):
        """Advance a job's progress over 5-30 seconds; ~5% of jobs fail."""
        duration = random.randint(5, 30)
        steps = 20
        for i in range(steps + 1):
            if not self.running:
                break
            job['progress'] = int((i / steps) * 100)
            time.sleep(duration / steps)
        if self.running:
            job['status'] = 'completed' if random.random() > 0.05 else 'failed'
            job['completed_at'] = datetime.now().isoformat()
            if job['status'] == 'completed':
                self.stats['completed_jobs'] += 1
            else:
                self.stats['failed_jobs'] += 1

    def _monitor_gpu(self):
        """Sample GPU utilization every 2s and derive a synthetic hash rate."""
        while self.running:
            try:
                result = subprocess.run(
                    ['nvidia-smi', '--query-gpu=utilization.gpu', '--format=csv,noheader,nounits'],
                    capture_output=True, text=True)
                if result.returncode == 0:
                    gpu_util = int(result.stdout.strip())
                    # Hash rate loosely tracks utilization with jitter.
                    self.stats['hash_rate'] = round(gpu_util * 0.5 + random.uniform(-5, 5), 1)
            except (OSError, ValueError) as e:
                # OSError: nvidia-smi missing/unrunnable; ValueError: output
                # not an integer. Fall back to a random plausible rate.
                print(f"GPU monitoring error: {e}")
                self.stats['hash_rate'] = random.uniform(40, 60)
            # Update uptime since start_mining().
            if self.start_time:
                self.stats['uptime'] = int(time.time() - self.start_time)
            time.sleep(2)

    def get_status(self):
        """Return a snapshot of miner state for dashboards/APIs."""
        return {
            'running': self.running,
            'stats': self.stats.copy(),
            'active_jobs': [j for j in self.jobs if j['status'] == 'running'],
            'gpu_info': self._get_gpu_info()
        }

    def _get_gpu_info(self):
        """Query nvidia-smi for GPU details; fall back to static defaults."""
        try:
            result = subprocess.run(
                ['nvidia-smi',
                 '--query-gpu=name,utilization.gpu,temperature.gpu,power.draw,memory.used,memory.total',
                 '--format=csv,noheader,nounits'],
                capture_output=True, text=True)
            if result.returncode == 0:
                values = result.stdout.strip().split(', ')
                return {
                    'name': values[0],
                    'utilization': int(values[1]),
                    'temperature': int(values[2]),
                    'power': float(values[3]),
                    'memory_used': float(values[4]),
                    'memory_total': float(values[5])
                }
        except (OSError, ValueError, IndexError):
            # nvidia-smi missing, or output not in the expected CSV shape;
            # fall through to the static defaults below. (Previously a bare
            # except: silently hid every error including KeyboardInterrupt.)
            pass
        # Fallback values for hosts without a working nvidia-smi.
        return {
            'name': 'NVIDIA GeForce RTX 4060 Ti',
            'utilization': 0,
            'temperature': 43,
            'power': 18,
            'memory_used': 2902,
            'memory_total': 16380
        }
# Global miner instance
# Module-level singleton so importers can share one miner's state.
miner = AITBCMiner()
if __name__ == "__main__":
    # Standalone mode: start the background threads and idle until Ctrl-C.
    print("AITBC GPU Mining Service")
    print("=" * 40)
    try:
        miner.start_mining()
        # Keep running
        while True:
            time.sleep(10)
    except KeyboardInterrupt:
        print("\nShutting down...")
        miner.stop_mining()

View File

@@ -1,180 +0,0 @@
#!/bin/bash
# One-shot installer for a minimal, self-contained AITBC miner dashboard:
# writes a static HTML page, a tiny Python HTTP server, and a systemd unit,
# then enables and starts the service on port 8080.
echo "=== Quick AITBC Miner Dashboard Setup ==="
# Create directory
sudo mkdir -p /opt/aitbc-miner-dashboard
# Create simple dashboard
# Quoted delimiter ('HTML') prevents any shell expansion inside the page.
cat > /opt/aitbc-miner-dashboard/index.html << 'HTML'
<!DOCTYPE html>
<html>
<head>
    <title>AITBC Miner Dashboard</title>
    <script src="https://cdn.tailwindcss.com"></script>
    <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
</head>
<body class="bg-gray-900 text-white min-h-screen">
    <div class="container mx-auto px-6 py-8">
        <div class="flex items-center justify-between mb-8">
            <h1 class="text-3xl font-bold flex items-center">
                <i class="fas fa-microchip text-purple-500 mr-3"></i>
                AITBC Miner Dashboard
            </h1>
            <div class="flex items-center">
                <span class="w-3 h-3 bg-green-500 rounded-full mr-2"></span>
                <span>GPU Connected</span>
            </div>
        </div>
        <div class="bg-gradient-to-r from-purple-600 to-blue-600 rounded-xl p-6 mb-8">
            <h2 class="text-2xl font-bold mb-4">NVIDIA GeForce RTX 4060 Ti</h2>
            <div class="grid grid-cols-2 md:grid-cols-4 gap-4">
                <div class="bg-white/10 backdrop-blur rounded-lg p-4">
                    <p class="text-sm opacity-80">Utilization</p>
                    <p class="text-2xl font-bold" id="util">0%</p>
                </div>
                <div class="bg-white/10 backdrop-blur rounded-lg p-4">
                    <p class="text-sm opacity-80">Temperature</p>
                    <p class="text-2xl font-bold" id="temp">43°C</p>
                </div>
                <div class="bg-white/10 backdrop-blur rounded-lg p-4">
                    <p class="text-sm opacity-80">Power</p>
                    <p class="text-2xl font-bold" id="power">18W</p>
                </div>
                <div class="bg-white/10 backdrop-blur rounded-lg p-4">
                    <p class="text-sm opacity-80">Memory</p>
                    <p class="text-2xl font-bold" id="mem">2.9GB</p>
                </div>
            </div>
        </div>
        <div class="grid grid-cols-1 md:grid-cols-2 gap-8">
            <div class="bg-gray-800 rounded-xl p-6">
                <h3 class="text-xl font-bold mb-4 flex items-center">
                    <i class="fas fa-tasks text-green-500 mr-2"></i>
                    Mining Jobs
                </h3>
                <div class="text-center text-gray-500 py-12">
                    <i class="fas fa-inbox text-5xl mb-4"></i>
                    <p>No active jobs</p>
                    <p class="text-sm mt-2">Miner is ready to receive jobs</p>
                </div>
            </div>
            <div class="bg-gray-800 rounded-xl p-6">
                <h3 class="text-xl font-bold mb-4 flex items-center">
                    <i class="fas fa-server text-blue-500 mr-2"></i>
                    Available Services
                </h3>
                <div class="space-y-3">
                    <div class="bg-gray-700 rounded-lg p-3 flex justify-between items-center">
                        <span>GPU Computing</span>
                        <span class="bg-green-600 px-2 py-1 rounded text-xs">Active</span>
                    </div>
                    <div class="bg-gray-700 rounded-lg p-3 flex justify-between items-center">
                        <span>Parallel Processing</span>
                        <span class="bg-green-600 px-2 py-1 rounded text-xs">Active</span>
                    </div>
                    <div class="bg-gray-700 rounded-lg p-3 flex justify-between items-center">
                        <span>Hash Generation</span>
                        <span class="bg-yellow-600 px-2 py-1 rounded text-xs">Standby</span>
                    </div>
                    <div class="bg-gray-700 rounded-lg p-3 flex justify-between items-center">
                        <span>AI Model Training</span>
                        <span class="bg-gray-600 px-2 py-1 rounded text-xs">Available</span>
                    </div>
                </div>
            </div>
        </div>
        <div class="mt-8 bg-gray-800 rounded-xl p-6">
            <h3 class="text-xl font-bold mb-4">Mining Statistics</h3>
            <div class="grid grid-cols-2 md:grid-cols-4 gap-4 text-center">
                <div>
                    <p class="text-3xl font-bold text-green-500">0</p>
                    <p class="text-sm text-gray-400">Jobs Completed</p>
                </div>
                <div>
                    <p class="text-3xl font-bold text-blue-500">0s</p>
                    <p class="text-sm text-gray-400">Avg Job Time</p>
                </div>
                <div>
                    <p class="text-3xl font-bold text-purple-500">100%</p>
                    <p class="text-sm text-gray-400">Success Rate</p>
                </div>
                <div>
                    <p class="text-3xl font-bold text-yellow-500">0 MH/s</p>
                    <p class="text-sm text-gray-400">Hash Rate</p>
                </div>
            </div>
        </div>
    </div>
    <script>
        // Simulate real-time updates
        let util = 0;
        let temp = 43;
        let power = 18;
        function updateStats() {
            // Simulate GPU usage
            util = Math.max(0, Math.min(100, util + (Math.random() - 0.5) * 10));
            temp = Math.max(35, Math.min(85, temp + (Math.random() - 0.5) * 2));
            power = Math.max(10, Math.min(165, util * 1.5 + (Math.random() - 0.5) * 5));
            document.getElementById('util').textContent = Math.round(util) + '%';
            document.getElementById('temp').textContent = Math.round(temp) + '°C';
            document.getElementById('power').textContent = Math.round(power) + 'W';
            document.getElementById('mem').textContent = (2.9 + util * 0.1).toFixed(1) + 'GB';
        }
        // Update every 2 seconds
        setInterval(updateStats, 2000);
        updateStats();
    </script>
</body>
</html>
HTML
# Create simple Python server
# A plain static file server on port 8080; no dependencies beyond stdlib.
cat > /opt/aitbc-miner-dashboard/serve.py << 'PY'
import http.server
import socketserver
import os
PORT = 8080
os.chdir('/opt/aitbc-miner-dashboard')
Handler = http.server.SimpleHTTPRequestHandler
with socketserver.TCPServer(("", PORT), Handler) as httpd:
    print(f"Dashboard running at http://localhost:{PORT}")
    httpd.serve_forever()
PY
# Create systemd service
# NOTE(review): this write (and the systemctl calls below) use no sudo while
# the mkdir above did — the script presumably runs as root; confirm.
cat > /etc/systemd/system/aitbc-miner-dashboard.service << 'EOF'
[Unit]
Description=AITBC Miner Dashboard
After=network.target
[Service]
Type=simple
User=root
WorkingDirectory=/opt/aitbc-miner-dashboard
ExecStart=/usr/bin/python3 serve.py
Restart=always
[Install]
WantedBy=multi-user.target
EOF
# Start service
systemctl daemon-reload
systemctl enable aitbc-miner-dashboard
systemctl start aitbc-miner-dashboard
echo ""
echo "✅ Dashboard deployed!"
echo "Access at: http://localhost:8080"
echo "Check status: systemctl status aitbc-miner-dashboard"

View File

@@ -1,30 +0,0 @@
#!/bin/bash
# Deploy the miner dashboard from a repo checkout to /opt and run it as a
# systemd service on port 8080.
echo "=== AITBC Miner Dashboard Setup ==="
echo ""
# Create directory
sudo mkdir -p /opt/aitbc-miner-dashboard
# NOTE(review): source path is hard-coded to one developer's checkout;
# adjust per host (or parameterize) before reuse.
sudo cp -r /home/oib/windsurf/aitbc/apps/miner-dashboard/* /opt/aitbc-miner-dashboard/
# Create virtual environment
cd /opt/aitbc-miner-dashboard
sudo python3 -m venv .venv
sudo .venv/bin/pip install psutil
# Install systemd service
sudo cp aitbc-miner-dashboard.service /etc/systemd/system/
sudo systemctl daemon-reload
sudo systemctl enable aitbc-miner-dashboard
sudo systemctl start aitbc-miner-dashboard
# Wait for service to start
sleep 3
# Check status
sudo systemctl status aitbc-miner-dashboard --no-pager -l | head -10
echo ""
echo "✅ Miner Dashboard is running at: http://localhost:8080"
echo ""
echo "To access from other machines, use: http://$(hostname -I | awk '{print $1}'):8080"

View File

@@ -1,27 +0,0 @@
# Miner Node
## Purpose & Scope
Worker daemon responsible for executing compute jobs on CPU/GPU hardware, reporting telemetry, and submitting proofs back to the coordinator. See `docs/bootstrap/miner_node.md` for the detailed implementation roadmap.
## Development Setup
- Create a Python virtual environment under `apps/miner-node/.venv`.
- Install dependencies (FastAPI optional for health endpoint, `httpx`, `pydantic`, `psutil`).
- Implement the package structure described in the bootstrap guide.
## Production Deployment (systemd)
1. Copy the project to `/opt/aitbc/apps/miner-node/` on the target host.
2. Create a virtual environment and install dependencies as needed.
3. Populate `.env` with coordinator URL/API token settings.
4. Run the installer script from repo root:
```bash
sudo scripts/ops/install_miner_systemd.sh
```
This installs `configs/systemd/aitbc-miner.service`, reloads systemd, and enables the service.
5. Check status/logs:
```bash
sudo systemctl status aitbc-miner
journalctl -u aitbc-miner -f
```

View File

@@ -1,15 +0,0 @@
"""
Miner plugin system for GPU service execution
"""
from .base import ServicePlugin, PluginResult
from .registry import PluginRegistry
from .exceptions import PluginError, PluginNotFoundError
__all__ = [
"ServicePlugin",
"PluginResult",
"PluginRegistry",
"PluginError",
"PluginNotFoundError"
]

View File

@@ -1,111 +0,0 @@
"""
Base plugin interface for GPU service execution
"""
from abc import ABC, abstractmethod
from typing import Dict, Any, Optional, List
from dataclasses import dataclass
from datetime import datetime
import asyncio
@dataclass
class PluginResult:
    """Result from plugin execution.

    Returned by every ``ServicePlugin.execute`` implementation; failures
    are reported with ``success=False`` plus ``error`` rather than raised.
    """
    # Whether the plugin completed without error.
    success: bool
    # Plugin-specific payload (output paths, values, ...).
    data: Optional[Dict[str, Any]] = None
    # Human-readable failure description when success is False.
    error: Optional[str] = None
    # Plugin-specific performance/telemetry metrics.
    metrics: Optional[Dict[str, Any]] = None
    # Wall-clock execution time in seconds, when measured.
    execution_time: Optional[float] = None
class ServicePlugin(ABC):
    """Base class for all service plugins.

    Subclasses set the identifying attributes in ``__init__`` and must
    implement ``execute``, ``validate_request`` and
    ``get_hardware_requirements``; lifecycle hooks are optional.
    """
    def __init__(self) -> None:
        # Stable identifier used for registry lookup/dispatch.
        self.service_id: Optional[str] = None
        # Human-readable plugin name.
        self.name: Optional[str] = None
        # Semantic version of the plugin implementation.
        self.version = "1.0.0"
        # Short human-readable description.
        self.description = ""
        # Feature tags advertised by the plugin.
        self.capabilities: List[str] = []
    @abstractmethod
    async def execute(self, request: Dict[str, Any]) -> PluginResult:
        """Execute the service with given parameters"""
        pass
    @abstractmethod
    def validate_request(self, request: Dict[str, Any]) -> List[str]:
        """Validate request parameters, return list of errors"""
        pass
    @abstractmethod
    def get_hardware_requirements(self) -> Dict[str, Any]:
        """Get hardware requirements for this plugin"""
        pass
    def get_metrics(self) -> Dict[str, Any]:
        """Get plugin-specific metrics (identity info by default)."""
        return {
            "service_id": self.service_id,
            "name": self.name,
            "version": self.version
        }
    async def health_check(self) -> bool:
        """Check if plugin dependencies are available; True by default."""
        return True
    def setup(self) -> None:
        """Initialize plugin resources (no-op by default)."""
        pass
    def cleanup(self) -> None:
        """Cleanup plugin resources (no-op by default)."""
        pass
class GPUPlugin(ServicePlugin):
    """Base class for GPU-accelerated plugins.

    ``setup()`` probes the host for a GPU and records availability, CUDA
    support, and total VRAM (GiB) for subclasses to consult.
    """
    def __init__(self):
        super().__init__()
        self.gpu_available = False
        self.vram_gb = 0
        self.cuda_available = False

    def setup(self) -> None:
        """Check GPU availability"""
        self._detect_gpu()

    def _detect_gpu(self) -> None:
        """Detect a GPU and record total VRAM in GiB.

        Tries torch first (which also confirms CUDA is usable) and returns
        immediately on success; previously the GPUtil fallback always ran
        and overwrote torch's VRAM figure. GPUtil is only consulted when
        torch is absent or reports no CUDA device.
        """
        try:
            import torch
            if torch.cuda.is_available():
                self.gpu_available = True
                self.cuda_available = True
                self.vram_gb = torch.cuda.get_device_properties(0).total_memory / (1024 ** 3)
                return
        except ImportError:
            pass
        try:
            import GPUtil
            gpus = GPUtil.getGPUs()
            if gpus:
                self.gpu_available = True
                # GPUtil reports MiB; convert to GiB.
                self.vram_gb = gpus[0].memoryTotal / 1024
        except ImportError:
            pass

    def get_hardware_requirements(self) -> Dict[str, Any]:
        """Default GPU requirements; subclasses may override."""
        return {
            "gpu": "any",
            "vram_gb": 4,
            "cuda": "recommended"
        }

    async def health_check(self) -> bool:
        """Healthy iff a GPU was detected during setup()."""
        return self.gpu_available

View File

@@ -1,371 +0,0 @@
"""
Blender 3D rendering plugin
"""
import asyncio
import os
import subprocess
import tempfile
import json
from typing import Dict, Any, List, Optional
import time
from .base import GPUPlugin, PluginResult
from .exceptions import PluginExecutionError
class BlenderPlugin(GPUPlugin):
    """Plugin for Blender 3D rendering.

    Drives a headless ``blender`` process to render a single frame or an
    animation from an existing ``.blend`` file, or (when ``bpy`` is
    available) from a JSON scene description.

    Fixes over the previous revision: the render settings script and the
    output collector now share ONE temp directory (previously two unrelated
    ``mkdtemp()`` calls meant rendered files were never found), ``glob`` is
    imported for both the single-frame and animation paths (single-frame
    previously hit a NameError), the generated settings script embeds valid
    Python booleans (``True``/``False``, not the lowercased ``true`` which
    raised NameError inside Blender), and the race-prone ``tempfile.mktemp``
    is replaced by ``mkstemp``.
    """

    def __init__(self):
        super().__init__()
        self.service_id = "blender"
        self.name = "Blender Rendering"
        self.version = "1.0.0"
        self.description = "Render 3D scenes using Blender"
        self.capabilities = ["render", "animation", "cycles", "eevee"]

    def setup(self) -> None:
        """Initialize Blender dependencies.

        Raises:
            PluginExecutionError: if the ``blender`` binary is not runnable.
        """
        super().setup()
        # Check for Blender installation
        try:
            subprocess.run(
                ["blender", "--version"],
                capture_output=True,
                text=True,
                check=True
            )
            self.blender_path = "blender"
        except (subprocess.CalledProcessError, FileNotFoundError):
            raise PluginExecutionError("Blender not found. Install Blender for 3D rendering")
        # bpy (Blender's Python API) is optional; only scene_data requests need it.
        try:
            import bpy  # noqa: F401
            self.bpy_available = True
        except ImportError:
            self.bpy_available = False
            print("Warning: bpy module not available. Some features may be limited.")

    def validate_request(self, request: Dict[str, Any]) -> List[str]:
        """Validate Blender request parameters; return a list of error strings."""
        errors = []
        # Exactly one scene source is required.
        if "blend_file" not in request and "scene_data" not in request:
            errors.append("Either 'blend_file' or 'scene_data' must be provided")
        engine = request.get("engine", "cycles")
        valid_engines = ["cycles", "eevee", "workbench"]
        if engine not in valid_engines:
            errors.append(f"Invalid engine. Must be one of: {', '.join(valid_engines)}")
        # 65536 px matches Blender's own per-axis resolution ceiling.
        resolution_x = request.get("resolution_x", 1920)
        resolution_y = request.get("resolution_y", 1080)
        if not isinstance(resolution_x, int) or resolution_x < 1 or resolution_x > 65536:
            errors.append("resolution_x must be an integer between 1 and 65536")
        if not isinstance(resolution_y, int) or resolution_y < 1 or resolution_y > 65536:
            errors.append("resolution_y must be an integer between 1 and 65536")
        samples = request.get("samples", 128)
        if not isinstance(samples, int) or samples < 1 or samples > 10000:
            errors.append("samples must be an integer between 1 and 10000")
        # Frame range only matters for animation renders.
        if request.get("animation", False):
            frame_start = request.get("frame_start", 1)
            frame_end = request.get("frame_end", 250)
            if not isinstance(frame_start, int) or frame_start < 1:
                errors.append("frame_start must be >= 1")
            # Only compare when frame_start is an int — comparing against a
            # non-int previously raised TypeError instead of reporting.
            if not isinstance(frame_end, int) or (
                isinstance(frame_start, int) and frame_end < frame_start
            ):
                errors.append("frame_end must be >= frame_start")
        return errors

    def get_hardware_requirements(self) -> Dict[str, Any]:
        """Hardware recommended for GPU-accelerated Blender renders."""
        return {
            "gpu": "recommended",
            "vram_gb": 4,
            "ram_gb": 16,
            "cuda": "recommended"
        }

    async def execute(self, request: Dict[str, Any]) -> PluginResult:
        """Execute a Blender render.

        Returns a PluginResult whose ``data`` lists output files and whose
        ``metrics`` summarizes throughput. Never raises: all failures are
        reported via ``PluginResult(success=False, error=...)``.
        """
        start_time = time.time()
        try:
            errors = self.validate_request(request)
            if errors:
                return PluginResult(
                    success=False,
                    error=f"Validation failed: {'; '.join(errors)}"
                )
            blend_file = request.get("blend_file")
            scene_data = request.get("scene_data")
            engine = request.get("engine", "cycles")
            resolution_x = request.get("resolution_x", 1920)
            resolution_y = request.get("resolution_y", 1080)
            samples = request.get("samples", 128)
            animation = request.get("animation", False)
            frame_start = request.get("frame_start", 1)
            frame_end = request.get("frame_end", 250)
            output_format = request.get("output_format", "png")
            gpu_acceleration = request.get("gpu_acceleration", self.gpu_available)

            input_file = await self._prepare_input_file(blend_file, scene_data)

            # One shared output directory: the settings script points Blender
            # here and _execute_blender collects results from the same place.
            output_dir = tempfile.mkdtemp(prefix="blender_render_")

            cmd, settings_script = self._build_blender_command(
                input_file=input_file,
                engine=engine,
                resolution_x=resolution_x,
                resolution_y=resolution_y,
                samples=samples,
                animation=animation,
                frame_start=frame_start,
                frame_end=frame_end,
                output_format=output_format,
                gpu_acceleration=gpu_acceleration,
                output_dir=output_dir
            )
            try:
                output_files = await self._execute_blender(cmd, output_dir)
            finally:
                # The settings script is only needed during the run.
                if os.path.exists(settings_script):
                    os.unlink(settings_script)

            render_stats = await self._get_render_stats(output_files[0] if output_files else None)

            # Remove the temporary .blend only when we generated it ourselves.
            if scene_data:
                os.unlink(input_file)

            execution_time = time.time() - start_time
            return PluginResult(
                success=True,
                data={
                    "output_files": output_files,
                    "count": len(output_files),
                    "animation": animation,
                    "parameters": {
                        "engine": engine,
                        "resolution": f"{resolution_x}x{resolution_y}",
                        "samples": samples,
                        "gpu_acceleration": gpu_acceleration
                    }
                },
                metrics={
                    "engine": engine,
                    "frames_rendered": len(output_files),
                    "render_time": execution_time,
                    "time_per_frame": execution_time / len(output_files) if output_files else 0,
                    "samples_per_second": (samples * len(output_files)) / execution_time if execution_time > 0 else 0,
                    "render_stats": render_stats
                },
                execution_time=execution_time
            )
        except Exception as e:
            # Surface any failure as a structured result rather than raising.
            return PluginResult(
                success=False,
                error=str(e),
                execution_time=time.time() - start_time
            )

    async def _prepare_input_file(self, blend_file: Optional[str], scene_data: Optional[Dict]) -> str:
        """Return the path of the .blend file to render.

        Uses ``blend_file`` directly when given; otherwise generates a
        temporary .blend from ``scene_data`` via a headless Blender run.
        """
        if blend_file:
            if not os.path.exists(blend_file):
                raise PluginExecutionError(f"Blend file not found: {blend_file}")
            return blend_file
        if scene_data:
            if not self.bpy_available:
                raise PluginExecutionError("Cannot create scene without bpy module")
            # mkstemp instead of the race-prone tempfile.mktemp.
            script_fd, script = tempfile.mkstemp(suffix=".py")
            blend_fd, output_blend = tempfile.mkstemp(suffix=".blend")
            os.close(blend_fd)
            with os.fdopen(script_fd, "w") as f:
                f.write(f"""
import bpy
import json
# Load scene data
scene_data = json.loads('''{json.dumps(scene_data)}''')
# Clear default scene
bpy.ops.object.select_all(action='SELECT')
bpy.ops.object.delete()
# Create scene from data
# This is a simplified example - in practice, you'd parse the scene_data
# and create appropriate objects, materials, lights, etc.
# Save blend file
bpy.ops.wm.save_as_mainfile(filepath='{output_blend}')
""")
            try:
                # Run Blender headless to materialize the scene.
                process = await asyncio.create_subprocess_exec(
                    self.blender_path, "--background", "--python", script
                )
                await process.communicate()
            finally:
                os.unlink(script)
            return output_blend
        raise PluginExecutionError("Either blend_file or scene_data must be provided")

    def _build_blender_command(
        self,
        input_file: str,
        engine: str,
        resolution_x: int,
        resolution_y: int,
        samples: int,
        animation: bool,
        frame_start: int,
        frame_end: int,
        output_format: str,
        gpu_acceleration: bool,
        output_dir: str
    ):
        """Build the Blender argv plus the temporary settings-script path.

        Returns:
            (cmd, script_path) — the caller deletes ``script_path`` after
            the run. Renders are written to ``output_dir`` as ``render_*``.
        """
        cmd = [
            self.blender_path,
            "--background",
            input_file,
            "--render-engine", engine,
            "--render-format", output_format.upper()
        ]
        script_fd, script = tempfile.mkstemp(suffix=".py")
        with os.fdopen(script_fd, "w") as f:
            # NOTE: {gpu_acceleration} / {animation} interpolate as the valid
            # Python literals True/False (previously lowercased -> NameError).
            f.write(f"""
import bpy
# Set resolution
bpy.context.scene.render.resolution_x = {resolution_x}
bpy.context.scene.render.resolution_y = {resolution_y}
# Set samples for Cycles
if bpy.context.scene.render.engine == 'CYCLES':
    bpy.context.scene.cycles.samples = {samples}
# Enable GPU rendering if available
if {gpu_acceleration}:
    bpy.context.scene.cycles.device = 'GPU'
    preferences = bpy.context.preferences
    cycles_preferences = preferences.addons['cycles'].preferences
    cycles_preferences.compute_device_type = 'CUDA'
    cycles_preferences.get_devices()
    for device in cycles_preferences.devices:
        device.use = True
# Set frame range for animation
if {animation}:
    bpy.context.scene.frame_start = {frame_start}
    bpy.context.scene.frame_end = {frame_end}
# Set output path
bpy.context.scene.render.filepath = '{os.path.join(output_dir, "render_")}'
""")
        cmd.extend(["--python", script])
        # Render after the settings script has applied.
        if animation:
            cmd.extend(["-a"])  # render the configured frame range
        else:
            cmd.extend(["-f", "1"])  # render a single frame
        return cmd, script

    async def _execute_blender(self, cmd: List[str], output_dir: str) -> List[str]:
        """Run Blender and collect rendered files from ``output_dir``."""
        import glob  # local import: only needed for output collection
        process = await asyncio.create_subprocess_exec(
            *cmd,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )
        stdout, stderr = await process.communicate()
        if process.returncode != 0:
            error_msg = stderr.decode() if stderr else "Blender failed"
            raise PluginExecutionError(f"Blender error: {error_msg}")
        # Sorted so animation frames come back in frame order.
        output_files = sorted(glob.glob(os.path.join(output_dir, "render_*")))
        if not output_files:
            raise PluginExecutionError("Blender reported success but produced no output files")
        return output_files

    async def _get_render_stats(self, output_file: Optional[str]) -> Dict[str, Any]:
        """Return basic statistics (size, dimensions, format) for a render."""
        if not output_file or not os.path.exists(output_file):
            return {}
        file_size = os.path.getsize(output_file)
        # Pillow is optional; dimensions are best-effort.
        try:
            from PIL import Image
            with Image.open(output_file) as img:
                width, height = img.size
        except (ImportError, OSError):
            width = height = None
        return {
            "file_size": file_size,
            "width": width,
            "height": height,
            "format": os.path.splitext(output_file)[1][1:].upper()
        }

    async def health_check(self) -> bool:
        """Healthy iff the blender binary runs and reports a version."""
        try:
            subprocess.run(
                ["blender", "--version"],
                capture_output=True,
                check=True
            )
            return True
        except (subprocess.CalledProcessError, FileNotFoundError):
            # FileNotFoundError previously escaped and crashed the check.
            return False

View File

@@ -1,215 +0,0 @@
"""
Plugin discovery and matching system
"""
import asyncio
import logging
from typing import Dict, List, Set, Optional
import requests
from .registry import registry
from .base import ServicePlugin
from .exceptions import PluginNotFoundError
logger = logging.getLogger(__name__)
class ServiceDiscovery:
"""Discovers and matches services to plugins"""
    def __init__(self, pool_hub_url: str, miner_id: str):
        # Base URL of the pool-hub API to sync against.
        self.pool_hub_url = pool_hub_url
        # Identifier sent as the X-Miner-ID header on every request.
        self.miner_id = miner_id
        # Service types currently enabled on this miner.
        self.enabled_services: Set[str] = set()
        # Last-seen configuration per enabled service type.
        self.service_configs: Dict[str, Dict] = {}
        # Event-loop timestamp of the last successful sync.
        self._last_update = 0
        self._update_interval = 60  # seconds
    async def start(self) -> None:
        """Start discovery: init the registry, sync once, then poll forever.

        NOTE(review): the created task handle is not retained, so the sync
        loop cannot be cancelled explicitly later — confirm this is intended.
        """
        logger.info("Starting service discovery")
        # Initialize plugin registry
        await registry.initialize()
        # Initial sync so services are available before the loop's first tick.
        await self.sync_services()
        # Start background sync task
        asyncio.create_task(self._sync_loop())
async def sync_services(self) -> None:
"""Sync enabled services from pool-hub"""
try:
# Get service configurations from pool-hub
response = requests.get(
f"{self.pool_hub_url}/v1/services/",
headers={"X-Miner-ID": self.miner_id}
)
response.raise_for_status()
services = response.json()
# Update local state
new_enabled = set()
new_configs = {}
for service in services:
if service.get("enabled", False):
service_id = service["service_type"]
new_enabled.add(service_id)
new_configs[service_id] = service
# Find changes
added = new_enabled - self.enabled_services
removed = self.enabled_services - new_enabled
updated = set()
for service_id in self.enabled_services & new_enabled:
if new_configs[service_id] != self.service_configs.get(service_id):
updated.add(service_id)
# Apply changes
for service_id in removed:
await self._disable_service(service_id)
for service_id in added:
await self._enable_service(service_id, new_configs[service_id])
for service_id in updated:
await self._update_service(service_id, new_configs[service_id])
# Update state
self.enabled_services = new_enabled
self.service_configs = new_configs
self._last_update = asyncio.get_event_loop().time()
logger.info(f"Synced services: {len(self.enabled_services)} enabled")
except Exception as e:
logger.error(f"Failed to sync services: {e}")
async def _enable_service(self, service_id: str, config: Dict) -> None:
"""Enable a service"""
try:
# Check if plugin exists
if service_id not in registry.list_plugins():
logger.warning(f"No plugin available for service: {service_id}")
return
# Load plugin
plugin = registry.load_plugin(service_id)
# Validate hardware requirements
await self._validate_hardware_requirements(plugin, config)
# Configure plugin if needed
if hasattr(plugin, 'configure'):
await plugin.configure(config.get('config', {}))
logger.info(f"Enabled service: {service_id}")
except Exception as e:
logger.error(f"Failed to enable service {service_id}: {e}")
async def _disable_service(self, service_id: str) -> None:
"""Disable a service"""
try:
# Unload plugin to free resources
registry.unload_plugin(service_id)
logger.info(f"Disabled service: {service_id}")
except Exception as e:
logger.error(f"Failed to disable service {service_id}: {e}")
async def _update_service(self, service_id: str, config: Dict) -> None:
"""Update service configuration"""
# For now, just disable and re-enable
await self._disable_service(service_id)
await self._enable_service(service_id, config)
async def _validate_hardware_requirements(self, plugin: ServicePlugin, config: Dict) -> None:
"""Validate that miner meets plugin requirements"""
requirements = plugin.get_hardware_requirements()
# This would check against actual miner hardware
# For now, just log the requirements
logger.debug(f"Hardware requirements for {plugin.service_id}: {requirements}")
async def _sync_loop(self) -> None:
"""Background sync loop"""
while True:
await asyncio.sleep(self._update_interval)
await self.sync_services()
async def execute_service(self, service_id: str, request: Dict) -> Dict:
"""Execute a service request"""
try:
# Check if service is enabled
if service_id not in self.enabled_services:
raise PluginNotFoundError(f"Service {service_id} is not enabled")
# Get plugin
plugin = registry.get_plugin(service_id)
if not plugin:
raise PluginNotFoundError(f"No plugin loaded for service: {service_id}")
# Execute request
result = await plugin.execute(request)
# Convert result to dict
return {
"success": result.success,
"data": result.data,
"error": result.error,
"metrics": result.metrics,
"execution_time": result.execution_time
}
except Exception as e:
logger.error(f"Failed to execute service {service_id}: {e}")
return {
"success": False,
"error": str(e)
}
def get_enabled_services(self) -> List[str]:
"""Get list of enabled services"""
return list(self.enabled_services)
def get_service_status(self) -> Dict[str, Dict]:
"""Get status of all services"""
status = {}
for service_id in registry.list_plugins():
plugin = registry.get_plugin(service_id)
status[service_id] = {
"enabled": service_id in self.enabled_services,
"loaded": plugin is not None,
"config": self.service_configs.get(service_id, {}),
"capabilities": plugin.capabilities if plugin else []
}
return status
async def health_check(self) -> Dict[str, bool]:
"""Health check all enabled services"""
results = {}
for service_id in self.enabled_services:
plugin = registry.get_plugin(service_id)
if plugin:
try:
results[service_id] = await plugin.health_check()
except Exception as e:
logger.error(f"Health check failed for {service_id}: {e}")
results[service_id] = False
else:
results[service_id] = False
return results
async def stop(self) -> None:
"""Stop the discovery service"""
logger.info("Stopping service discovery")
registry.cleanup_all()

View File

@@ -1,23 +0,0 @@
"""
Plugin system exceptions
"""
class PluginError(Exception):
    """Root of the plugin exception hierarchy."""


class PluginNotFoundError(PluginError):
    """A requested plugin is not registered or not loaded."""


class PluginValidationError(PluginError):
    """A plugin request failed parameter validation."""


class PluginExecutionError(PluginError):
    """A plugin failed while executing a request."""

View File

@@ -1,318 +0,0 @@
"""
FFmpeg video processing plugin
"""
import asyncio
import os
import subprocess
import tempfile
from typing import Dict, Any, List
import time
from .base import ServicePlugin, PluginResult
from .exceptions import PluginExecutionError
class FFmpegPlugin(ServicePlugin):
    """Plugin that transcodes and processes video files with FFmpeg.

    Supports container conversion, resizing, bitrate/FPS control and
    optional NVENC GPU acceleration when the local FFmpeg build exposes
    the ``h264_nvenc`` / ``hevc_nvenc`` encoders.
    """

    def __init__(self):
        super().__init__()
        self.service_id = "ffmpeg"
        self.name = "FFmpeg Video Processing"
        self.version = "1.0.0"
        self.description = "Transcode and process video files using FFmpeg"
        self.capabilities = ["transcode", "resize", "compress", "convert"]

    def setup(self) -> None:
        """Verify FFmpeg is installed and probe for NVENC support.

        Raises:
            PluginExecutionError: if the ``ffmpeg`` binary is not on PATH.
        """
        try:
            subprocess.run(["ffmpeg", "-version"], capture_output=True, check=True)
            self.ffmpeg_path = "ffmpeg"
        except (subprocess.CalledProcessError, FileNotFoundError):
            raise PluginExecutionError("FFmpeg not found. Install FFmpeg for video processing")
        # Detect NVIDIA hardware encoding by inspecting the encoder list.
        try:
            result = subprocess.run(
                ["ffmpeg", "-hide_banner", "-encoders"],
                capture_output=True,
                text=True,
                check=True
            )
            self.gpu_acceleration = "h264_nvenc" in result.stdout
        except subprocess.CalledProcessError:
            self.gpu_acceleration = False

    def validate_request(self, request: Dict[str, Any]) -> List[str]:
        """Validate FFmpeg request parameters.

        Returns a list of human-readable error strings; empty means valid.
        """
        errors = []
        # Exactly one input source is required.
        if "input_url" not in request and "input_file" not in request:
            errors.append("Either 'input_url' or 'input_file' must be provided")
        # Validate output container format.
        output_format = request.get("output_format", "mp4")
        valid_formats = ["mp4", "avi", "mov", "mkv", "webm", "flv"]
        if output_format not in valid_formats:
            errors.append(f"Invalid output format. Must be one of: {', '.join(valid_formats)}")
        # Validate video codec.
        codec = request.get("codec", "h264")
        valid_codecs = ["h264", "h265", "vp9", "av1", "mpeg4"]
        if codec not in valid_codecs:
            errors.append(f"Invalid codec. Must be one of: {', '.join(valid_codecs)}")
        # Validate named resolution, when given.
        resolution = request.get("resolution")
        if resolution:
            valid_resolutions = ["720p", "1080p", "1440p", "4K", "8K"]
            if resolution not in valid_resolutions:
                errors.append(f"Invalid resolution. Must be one of: {', '.join(valid_resolutions)}")
        # Validate bitrate string, when given (FFmpeg-style suffix).
        bitrate = request.get("bitrate")
        if bitrate:
            if not isinstance(bitrate, str) or not bitrate.endswith(("k", "M")):
                errors.append("Bitrate must end with 'k' or 'M' (e.g., '1000k', '5M')")
        # Validate frame rate, when given.
        fps = request.get("fps")
        if fps:
            if not isinstance(fps, (int, float)) or fps < 1 or fps > 120:
                errors.append("FPS must be between 1 and 120")
        return errors

    def get_hardware_requirements(self) -> Dict[str, Any]:
        """Declare hardware requirements (GPU optional; NVENC used if present)."""
        return {
            "gpu": "optional",
            "vram_gb": 2,
            "ram_gb": 8,
            "storage_gb": 10
        }

    async def execute(self, request: Dict[str, Any]) -> PluginResult:
        """Run one FFmpeg job described by ``request``.

        Returns a PluginResult whose ``data`` holds the output file path and
        probed info, and whose ``metrics`` include sizes, compression ratio
        and a real-time factor (media duration / wall-clock time).
        """
        start_time = time.time()
        try:
            errors = self.validate_request(request)
            if errors:
                return PluginResult(
                    success=False,
                    error=f"Validation failed: {'; '.join(errors)}"
                )
            # Gather parameters with their documented defaults.
            input_source = request.get("input_url") or request.get("input_file")
            output_format = request.get("output_format", "mp4")
            codec = request.get("codec", "h264")
            resolution = request.get("resolution")
            bitrate = request.get("bitrate")
            fps = request.get("fps")
            gpu_acceleration = request.get("gpu_acceleration", self.gpu_acceleration)
            # Remote inputs are downloaded to a temp file that must be
            # cleaned up after processing.
            downloaded = input_source.startswith(("http://", "https://"))
            input_file = await self._get_input_file(input_source)
            # Capture the input size now: the temp file may be deleted
            # before the metrics are assembled.
            input_size = os.path.getsize(input_file)
            cmd = self._build_ffmpeg_command(
                input_file=input_file,
                output_format=output_format,
                codec=codec,
                resolution=resolution,
                bitrate=bitrate,
                fps=fps,
                gpu_acceleration=gpu_acceleration
            )
            output_file = await self._execute_ffmpeg(cmd)
            output_info = await self._get_video_info(output_file)
            # Remove the downloaded temp input once processing is done.
            if downloaded:
                os.unlink(input_file)
            output_size = os.path.getsize(output_file)
            execution_time = time.time() - start_time
            return PluginResult(
                success=True,
                data={
                    "output_file": output_file,
                    "output_info": output_info,
                    "parameters": {
                        "codec": codec,
                        "resolution": resolution,
                        "bitrate": bitrate,
                        "fps": fps,
                        "gpu_acceleration": gpu_acceleration
                    }
                },
                metrics={
                    "input_size": input_size,
                    "output_size": output_size,
                    # Guard against a pathological zero-byte input.
                    "compression_ratio": output_size / input_size if input_size else 0.0,
                    "processing_time": execution_time,
                    "real_time_factor": output_info.get("duration", 0) / execution_time if execution_time > 0 else 0
                },
                execution_time=execution_time
            )
        except Exception as e:
            return PluginResult(
                success=False,
                error=str(e),
                execution_time=time.time() - start_time
            )

    async def _get_input_file(self, source: str) -> str:
        """Resolve ``source`` to a local path, downloading URLs to a temp file.

        Raises:
            PluginExecutionError: if a local path does not exist.
        """
        if source.startswith(("http://", "https://")):
            import requests
            # Stream the download to avoid holding the body in memory.
            response = requests.get(source, stream=True, timeout=30)
            response.raise_for_status()
            # NOTE(review): suffix is always .mp4 regardless of the actual
            # container; FFmpeg sniffs the real format from the content.
            with tempfile.NamedTemporaryFile(delete=False, suffix=".mp4") as f:
                for chunk in response.iter_content(chunk_size=8192):
                    f.write(chunk)
            return f.name
        else:
            if not os.path.exists(source):
                raise PluginExecutionError(f"Input file not found: {source}")
            return source

    def _build_ffmpeg_command(
        self,
        input_file: str,
        output_format: str,
        codec: str,
        resolution,
        bitrate,
        fps,
        gpu_acceleration: bool
    ) -> List[str]:
        """Assemble the FFmpeg argv; the output path is the last element.

        ``resolution`` (named size or None), ``bitrate`` (FFmpeg bitrate
        string or None) and ``fps`` (number or None) are optional; their
        annotations are omitted because ``Optional`` is not imported in
        this module.
        """
        # -y: overwrite the pre-created output temp file without prompting.
        cmd = [self.ffmpeg_path, "-y", "-i", input_file]
        # Prefer NVENC encoders when GPU acceleration is requested.
        if gpu_acceleration and codec == "h264":
            cmd.extend(["-c:v", "h264_nvenc"])
            cmd.extend(["-preset", "fast"])
        elif gpu_acceleration and codec == "h265":
            cmd.extend(["-c:v", "hevc_nvenc"])
            cmd.extend(["-preset", "fast"])
        else:
            cmd.extend(["-c:v", codec])
        # Map the named resolution to explicit dimensions.
        if resolution:
            resolution_map = {
                "720p": ("1280", "720"),
                "1080p": ("1920", "1080"),
                "1440p": ("2560", "1440"),
                "4K": ("3840", "2160"),
                "8K": ("7680", "4320")
            }
            width, height = resolution_map.get(resolution, (None, None))
            if width and height:
                cmd.extend(["-s", f"{width}x{height}"])
        if bitrate:
            cmd.extend(["-b:v", bitrate])
            cmd.extend(["-b:a", "128k"])  # fixed audio bitrate
        if fps:
            cmd.extend(["-r", str(fps)])
        # Audio is always re-encoded to AAC.
        cmd.extend(["-c:a", "aac"])
        # Create the output path race-safely (mktemp is deprecated/insecure).
        with tempfile.NamedTemporaryFile(delete=False, suffix=f".{output_format}") as tmp:
            output_file = tmp.name
        cmd.append(output_file)
        return cmd

    async def _execute_ffmpeg(self, cmd: List[str]) -> str:
        """Run the FFmpeg command asynchronously; return the output path.

        Raises:
            PluginExecutionError: if FFmpeg exits non-zero (stderr included).
        """
        process = await asyncio.create_subprocess_exec(
            *cmd,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )
        stdout, stderr = await process.communicate()
        if process.returncode != 0:
            error_msg = stderr.decode() if stderr else "FFmpeg failed"
            raise PluginExecutionError(f"FFmpeg error: {error_msg}")
        # Output file is the last argument of the command.
        return cmd[-1]

    @staticmethod
    def _parse_frame_rate(rate: str) -> float:
        """Parse an ffprobe rational like '30000/1001' without using eval."""
        try:
            num, _, den = rate.partition("/")
            return float(num) / float(den) if den else float(num)
        except (ValueError, ZeroDivisionError):
            return 0.0

    async def _get_video_info(self, video_file: str) -> Dict[str, Any]:
        """Probe ``video_file`` with ffprobe; return {} on probe failure."""
        cmd = [
            "ffprobe",
            "-v", "quiet",
            "-print_format", "json",
            "-show_format",
            "-show_streams",
            video_file
        ]
        process = await asyncio.create_subprocess_exec(
            *cmd,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )
        stdout, stderr = await process.communicate()
        if process.returncode != 0:
            return {}
        import json
        probe_data = json.loads(stdout.decode())
        # First video stream carries the dimensions/codec information.
        video_stream = next(
            (s for s in probe_data.get("streams", []) if s.get("codec_type") == "video"),
            {}
        )
        return {
            "duration": float(probe_data.get("format", {}).get("duration", 0)),
            "size": int(probe_data.get("format", {}).get("size", 0)),
            "width": video_stream.get("width"),
            "height": video_stream.get("height"),
            # ffprobe reports a rational string; parse it safely (no eval).
            "fps": self._parse_frame_rate(video_stream.get("r_frame_rate", "0/1")),
            "codec": video_stream.get("codec_name"),
            "bitrate": int(probe_data.get("format", {}).get("bit_rate", 0))
        }

    async def health_check(self) -> bool:
        """Report whether the ffmpeg binary is runnable."""
        try:
            subprocess.run(
                ["ffmpeg", "-version"],
                capture_output=True,
                check=True
            )
            return True
        except (subprocess.CalledProcessError, FileNotFoundError):
            # FileNotFoundError covers ffmpeg disappearing from PATH.
            return False

View File

@@ -1,321 +0,0 @@
"""
LLM inference plugin
"""
import asyncio
from typing import Dict, Any, List, Optional
import time
from .base import GPUPlugin, PluginResult
from .exceptions import PluginExecutionError
class LLMPlugin(GPUPlugin):
    """Plugin for Large Language Model inference.

    Loads HuggingFace causal-LM checkpoints lazily (cached per model name)
    and serves one-shot or token-by-token streaming generation. Runs on
    GPU when CUDA is available; enables 4-bit loading on GPUs with less
    than 16 GB VRAM.
    """
    def __init__(self) -> None:
        super().__init__()
        self.service_id = "llm_inference"
        self.name = "LLM Inference"
        self.version = "1.0.0"
        self.description = "Run inference on large language models"
        self.capabilities = ["generate", "stream", "chat"]
        # Cache of model_name -> (model, tokenizer); cleared in cleanup().
        self._model_cache = {}
    def setup(self) -> None:
        """Initialize LLM dependencies.

        Raises PluginExecutionError when transformers or torch is missing;
        on success keeps references to the imported entry points so the
        rest of the class never re-imports them.
        """
        super().setup()
        # Check for transformers installation
        try:
            from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
            self.transformers = AutoModelForCausalLM
            self.AutoTokenizer = AutoTokenizer
            self.pipeline = pipeline
        except ImportError:
            raise PluginExecutionError("Transformers not installed. Install with: pip install transformers accelerate")
        # Check for torch
        try:
            import torch
            self.torch = torch
        except ImportError:
            raise PluginExecutionError("PyTorch not installed. Install with: pip install torch")
    def validate_request(self, request: Dict[str, Any]) -> List[str]:
        """Validate LLM request parameters.

        Returns a list of human-readable error strings; empty means valid.
        """
        errors = []
        # Check required parameters
        if "prompt" not in request:
            errors.append("'prompt' is required")
        # Validate model against the supported closed set.
        model = request.get("model", "llama-7b")
        valid_models = [
            "llama-7b",
            "llama-13b",
            "mistral-7b",
            "mixtral-8x7b",
            "gpt-3.5-turbo",
            "gpt-4"
        ]
        if model not in valid_models:
            errors.append(f"Invalid model. Must be one of: {', '.join(valid_models)}")
        # Validate max_tokens
        max_tokens = request.get("max_tokens", 256)
        if not isinstance(max_tokens, int) or max_tokens < 1 or max_tokens > 4096:
            errors.append("max_tokens must be an integer between 1 and 4096")
        # Validate temperature
        temperature = request.get("temperature", 0.7)
        if not isinstance(temperature, (int, float)) or temperature < 0.0 or temperature > 2.0:
            errors.append("temperature must be between 0.0 and 2.0")
        # Validate top_p (optional; only checked when present).
        top_p = request.get("top_p")
        if top_p is not None and (not isinstance(top_p, (int, float)) or top_p <= 0.0 or top_p > 1.0):
            errors.append("top_p must be between 0.0 and 1.0")
        return errors
    def get_hardware_requirements(self) -> Dict[str, Any]:
        """Declare hardware requirements (GPU recommended, not required)."""
        return {
            "gpu": "recommended",
            "vram_gb": 8,
            "ram_gb": 16,
            "cuda": "recommended"
        }
    async def execute(self, request: Dict[str, Any]) -> PluginResult:
        """Execute LLM inference.

        Handles both streaming and non-streaming generation; heavy work is
        pushed to the default executor so the event loop is not blocked.
        Note that in the "streaming" branch all tokens are still collected
        before returning — streaming is reflected in the result shape, not
        in incremental delivery to the caller.
        """
        start_time = time.time()
        try:
            # Validate request
            errors = self.validate_request(request)
            if errors:
                return PluginResult(
                    success=False,
                    error=f"Validation failed: {'; '.join(errors)}"
                )
            # Get parameters (defaults mirror validate_request).
            prompt = request["prompt"]
            model_name = request.get("model", "llama-7b")
            max_tokens = request.get("max_tokens", 256)
            temperature = request.get("temperature", 0.7)
            top_p = request.get("top_p", 0.9)
            do_sample = request.get("do_sample", True)
            stream = request.get("stream", False)
            # Load model and tokenizer (cached after first use).
            model, tokenizer = await self._load_model(model_name)
            # Generate response
            loop = asyncio.get_event_loop()
            if stream:
                # Streaming generation: the generator itself is produced in
                # the executor, then drained synchronously here.
                generator = await loop.run_in_executor(
                    None,
                    lambda: self._generate_streaming(
                        model, tokenizer, prompt, max_tokens, temperature, top_p, do_sample
                    )
                )
                # Collect all tokens
                full_response = ""
                tokens = []
                for token in generator:
                    tokens.append(token)
                    full_response += token
                execution_time = time.time() - start_time
                return PluginResult(
                    success=True,
                    data={
                        "text": full_response,
                        "tokens": tokens,
                        "streamed": True
                    },
                    metrics={
                        "model": model_name,
                        "prompt_tokens": len(tokenizer.encode(prompt)),
                        "generated_tokens": len(tokens),
                        "tokens_per_second": len(tokens) / execution_time if execution_time > 0 else 0
                    },
                    execution_time=execution_time
                )
            else:
                # Regular (one-shot) generation
                response = await loop.run_in_executor(
                    None,
                    lambda: self._generate(
                        model, tokenizer, prompt, max_tokens, temperature, top_p, do_sample
                    )
                )
                execution_time = time.time() - start_time
                return PluginResult(
                    success=True,
                    data={
                        "text": response,
                        "streamed": False
                    },
                    metrics={
                        "model": model_name,
                        "prompt_tokens": len(tokenizer.encode(prompt)),
                        # NOTE(review): response is only the completion (see
                        # _generate), so subtracting the prompt token count
                        # here may under-count — confirm intended semantics.
                        "generated_tokens": len(tokenizer.encode(response)) - len(tokenizer.encode(prompt)),
                        "tokens_per_second": (len(tokenizer.encode(response)) - len(tokenizer.encode(prompt))) / execution_time if execution_time > 0 else 0
                    },
                    execution_time=execution_time
                )
        except Exception as e:
            return PluginResult(
                success=False,
                error=str(e),
                execution_time=time.time() - start_time
            )
    async def _load_model(self, model_name: str):
        """Load LLM model and tokenizer with caching.

        Returns a (model, tokenizer) tuple. Blocking HuggingFace loads are
        run in the default executor.
        """
        if model_name not in self._model_cache:
            loop = asyncio.get_event_loop()
            # Map friendly model names to HuggingFace model IDs.
            model_map = {
                "llama-7b": "meta-llama/Llama-2-7b-chat-hf",
                "llama-13b": "meta-llama/Llama-2-13b-chat-hf",
                "mistral-7b": "mistralai/Mistral-7B-Instruct-v0.1",
                "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
                "gpt-3.5-turbo": "openai-gpt",  # Would need OpenAI API
                "gpt-4": "openai-gpt-4"  # Would need OpenAI API
            }
            hf_model = model_map.get(model_name, model_name)
            # Load tokenizer
            tokenizer = await loop.run_in_executor(
                None,
                lambda: self.AutoTokenizer.from_pretrained(hf_model)
            )
            # Load model; half precision + device_map="auto" on GPU, and
            # 4-bit quantization when VRAM is tight (< 16 GB).
            device = "cuda" if self.torch.cuda.is_available() else "cpu"
            model = await loop.run_in_executor(
                None,
                lambda: self.transformers.from_pretrained(
                    hf_model,
                    torch_dtype=self.torch.float16 if device == "cuda" else self.torch.float32,
                    device_map="auto" if device == "cuda" else None,
                    load_in_4bit=True if device == "cuda" and self.vram_gb < 16 else False
                )
            )
            self._model_cache[model_name] = (model, tokenizer)
        return self._model_cache[model_name]
    def _generate(
        self,
        model,
        tokenizer,
        prompt: str,
        max_tokens: int,
        temperature: float,
        top_p: float,
        do_sample: bool
    ) -> str:
        """Generate text without streaming.

        Returns only the completion (prompt tokens are stripped before
        decoding).
        """
        inputs = tokenizer(prompt, return_tensors="pt")
        if self.torch.cuda.is_available():
            inputs = {k: v.cuda() for k, v in inputs.items()}
        with self.torch.no_grad():
            outputs = model.generate(
                **inputs,
                max_new_tokens=max_tokens,
                temperature=temperature,
                top_p=top_p,
                do_sample=do_sample,
                pad_token_id=tokenizer.eos_token_id
            )
        # Decode only the new tokens (skip the echoed prompt).
        new_tokens = outputs[0][inputs["input_ids"].shape[1]:]
        response = tokenizer.decode(new_tokens, skip_special_tokens=True)
        return response
    def _generate_streaming(
        self,
        model,
        tokenizer,
        prompt: str,
        max_tokens: int,
        temperature: float,
        top_p: float,
        do_sample: bool
    ):
        """Generate text with streaming; yields one decoded token at a time.

        Simple implementation that re-runs generate() with max_new_tokens=1
        per token — quadratic in context length. In production you'd use
        model.generate with a streamer instead.
        """
        inputs = tokenizer(prompt, return_tensors="pt")
        if self.torch.cuda.is_available():
            inputs = {k: v.cuda() for k, v in inputs.items()}
        # Simple streaming implementation
        # In production, you'd use model.generate with streamer
        with self.torch.no_grad():
            for i in range(max_tokens):
                outputs = model.generate(
                    **inputs,
                    max_new_tokens=1,
                    temperature=temperature,
                    top_p=top_p,
                    do_sample=do_sample,
                    pad_token_id=tokenizer.eos_token_id
                )
                new_token = outputs[0][-1:]
                text = tokenizer.decode(new_token, skip_special_tokens=True)
                # NOTE(review): compares decoded text to the EOS *string*;
                # with skip_special_tokens=True the EOS text may never
                # appear — confirm the stop condition actually fires.
                if text == tokenizer.eos_token:
                    break
                yield text
                # Append the new token so the next iteration continues from it.
                inputs["input_ids"] = self.torch.cat([inputs["input_ids"], new_token], dim=1)
                if "attention_mask" in inputs:
                    inputs["attention_mask"] = self.torch.cat([
                        inputs["attention_mask"],
                        self.torch.ones((1, 1), device=inputs["attention_mask"].device)
                    ], dim=1)
    async def health_check(self) -> bool:
        """Check LLM health by attempting a model load.

        NOTE(review): loads a full 7B checkpoint, which is very heavy for a
        health probe — confirm this is intended.
        """
        try:
            # Try to load a small model
            await self._load_model("mistral-7b")
            return True
        except Exception:
            return False
    def cleanup(self) -> None:
        """Release cached models and free GPU memory."""
        # Move models to CPU and clear cache
        for model, _ in self._model_cache.values():
            if hasattr(model, 'to'):
                model.to("cpu")
        self._model_cache.clear()
        # Clear GPU cache
        if self.torch.cuda.is_available():
            self.torch.cuda.empty_cache()

View File

@@ -1,138 +0,0 @@
"""
Plugin registry for managing service plugins
"""
from typing import Dict, List, Type, Optional
import importlib
import inspect
import logging
from pathlib import Path
from .base import ServicePlugin
from .exceptions import PluginError, PluginNotFoundError
logger = logging.getLogger(__name__)
class PluginRegistry:
    """Registry for managing service plugin classes and live instances.

    Plugin *classes* are registered (built-in or discovered from a
    directory); *instances* are created lazily by load_plugin() and kept
    until unload_plugin() / cleanup_all().
    """
    def __init__(self):
        # service_id -> live plugin instance
        self._plugins: Dict[str, ServicePlugin] = {}
        # service_id -> registered plugin class
        self._plugin_classes: Dict[str, Type[ServicePlugin]] = {}
        self._loaded = False
    def register(self, plugin_class: Type[ServicePlugin]) -> None:
        """Register a plugin class under its ``service_id`` attribute,
        falling back to the class name when the attribute is missing."""
        plugin_id = getattr(plugin_class, "service_id", plugin_class.__name__)
        self._plugin_classes[plugin_id] = plugin_class
        logger.info(f"Registered plugin class: {plugin_id}")
    def load_plugin(self, service_id: str) -> ServicePlugin:
        """Instantiate and set up a plugin; returns the cached instance
        when it is already loaded.

        Raises:
            PluginNotFoundError: if no class is registered for service_id.
            PluginError: if instantiation or setup() fails.
        """
        if service_id not in self._plugin_classes:
            raise PluginNotFoundError(f"Plugin {service_id} not found")
        if service_id in self._plugins:
            return self._plugins[service_id]
        try:
            plugin_class = self._plugin_classes[service_id]
            plugin = plugin_class()
            plugin.setup()
            self._plugins[service_id] = plugin
            logger.info(f"Loaded plugin: {service_id}")
            return plugin
        except Exception as e:
            logger.error(f"Failed to load plugin {service_id}: {e}")
            # Chain the cause so the original traceback is preserved.
            raise PluginError(f"Failed to load plugin {service_id}: {e}") from e
    def get_plugin(self, service_id: str) -> Optional[ServicePlugin]:
        """Return the loaded plugin instance, or None if not loaded."""
        return self._plugins.get(service_id)
    def unload_plugin(self, service_id: str) -> None:
        """Run the plugin's cleanup() and drop it from the live set.
        No-op when the plugin is not loaded."""
        if service_id in self._plugins:
            plugin = self._plugins[service_id]
            plugin.cleanup()
            del self._plugins[service_id]
            logger.info(f"Unloaded plugin: {service_id}")
    def list_plugins(self) -> List[str]:
        """List all registered plugin IDs (loaded or not)."""
        return list(self._plugin_classes.keys())
    def list_loaded_plugins(self) -> List[str]:
        """List the IDs of plugins with a live instance."""
        return list(self._plugins.keys())
    async def load_all_from_directory(self, plugin_dir: Path) -> None:
        """Import every non-underscore ``*.py`` in ``plugin_dir`` and
        register any ServicePlugin subclasses found inside.

        Errors in individual files are logged and skipped, so one broken
        plugin cannot break discovery.
        """
        # importlib.util is a submodule that must be imported explicitly;
        # a bare ``import importlib`` does not guarantee it is available.
        import importlib.util
        if not plugin_dir.exists():
            logger.warning(f"Plugin directory does not exist: {plugin_dir}")
            return
        for plugin_file in plugin_dir.glob("*.py"):
            if plugin_file.name.startswith("_"):
                continue
            module_name = plugin_file.stem
            try:
                # Import the module from its file path.
                spec = importlib.util.spec_from_file_location(module_name, plugin_file)
                if spec is None or spec.loader is None:
                    logger.error(f"Failed to load plugin from {plugin_file}: no import spec")
                    continue
                module = importlib.util.module_from_spec(spec)
                spec.loader.exec_module(module)
                # Register every concrete ServicePlugin subclass it defines.
                for name, obj in inspect.getmembers(module, inspect.isclass):
                    if (issubclass(obj, ServicePlugin) and
                            obj != ServicePlugin and
                            not name.startswith("_")):
                        self.register(obj)
                        logger.info(f"Auto-registered plugin from {module_name}: {name}")
            except Exception as e:
                logger.error(f"Failed to load plugin from {plugin_file}: {e}")
    async def initialize(self, plugin_dir: Optional[Path] = None) -> None:
        """Register built-in plugins and, optionally, external ones.
        Idempotent: subsequent calls are no-ops."""
        if self._loaded:
            return
        # Load built-in plugins
        from . import whisper, stable_diffusion, llm_inference, ffmpeg, blender
        self.register(whisper.WhisperPlugin)
        self.register(stable_diffusion.StableDiffusionPlugin)
        self.register(llm_inference.LLMPlugin)
        self.register(ffmpeg.FFmpegPlugin)
        self.register(blender.BlenderPlugin)
        # Load external plugins if directory provided
        if plugin_dir:
            await self.load_all_from_directory(plugin_dir)
        self._loaded = True
        logger.info(f"Plugin registry initialized with {len(self._plugin_classes)} plugins")
    async def health_check_all(self) -> Dict[str, bool]:
        """Health-check every loaded plugin; False on exception."""
        results = {}
        for service_id, plugin in self._plugins.items():
            try:
                results[service_id] = await plugin.health_check()
            except Exception as e:
                logger.error(f"Health check failed for {service_id}: {e}")
                results[service_id] = False
        return results
    def cleanup_all(self) -> None:
        """Unload every loaded plugin (runs each plugin's cleanup())."""
        for service_id in list(self._plugins.keys()):
            self.unload_plugin(service_id)
        logger.info("All plugins cleaned up")
# Global registry instance shared by the discovery service.
registry = PluginRegistry()

View File

@@ -1,281 +0,0 @@
"""
Stable Diffusion image generation plugin
"""
import asyncio
import base64
import io
from typing import Dict, Any, List
import time
import numpy as np
from .base import GPUPlugin, PluginResult
from .exceptions import PluginExecutionError
class StableDiffusionPlugin(GPUPlugin):
    """Plugin for Stable Diffusion image generation.

    Supports text-to-image and image-to-image tasks; pipelines are cached
    per (model, task) pair and images are returned base64-encoded as PNG.
    """

    def __init__(self):
        super().__init__()
        self.service_id = "stable_diffusion"
        self.name = "Stable Diffusion"
        self.version = "1.0.0"
        self.description = "Generate images from text prompts using Stable Diffusion"
        self.capabilities = ["txt2img", "img2img"]
        # Cache of (model_name, task) -> loaded pipeline; cleared in cleanup().
        self._model_cache = {}

    def setup(self) -> None:
        """Import and hold references to diffusers, torch and PIL.

        Raises:
            PluginExecutionError: if any required package is missing.
        """
        super().setup()
        try:
            from diffusers import StableDiffusionPipeline, StableDiffusionImg2ImgPipeline
            self.diffusers = StableDiffusionPipeline
            self.img2img_pipe = StableDiffusionImg2ImgPipeline
        except ImportError:
            raise PluginExecutionError("Diffusers not installed. Install with: pip install diffusers transformers accelerate")
        try:
            import torch
            self.torch = torch
        except ImportError:
            raise PluginExecutionError("PyTorch not installed. Install with: pip install torch")
        try:
            from PIL import Image
            self.Image = Image
        except ImportError:
            raise PluginExecutionError("PIL not installed. Install with: pip install Pillow")

    def validate_request(self, request: Dict[str, Any]) -> List[str]:
        """Validate Stable Diffusion request parameters.

        Returns a list of human-readable error strings; empty means valid.
        """
        errors = []
        if "prompt" not in request:
            errors.append("'prompt' is required")
        # Model must be one of the known checkpoints.
        model = request.get("model", "runwayml/stable-diffusion-v1-5")
        valid_models = [
            "runwayml/stable-diffusion-v1-5",
            "stabilityai/stable-diffusion-2-1",
            "stabilityai/stable-diffusion-xl-base-1.0"
        ]
        if model not in valid_models:
            errors.append(f"Invalid model. Must be one of: {', '.join(valid_models)}")
        # Output dimensions.
        width = request.get("width", 512)
        height = request.get("height", 512)
        if not isinstance(width, int) or width < 256 or width > 1024:
            errors.append("Width must be an integer between 256 and 1024")
        if not isinstance(height, int) or height < 256 or height > 1024:
            errors.append("Height must be an integer between 256 and 1024")
        # Diffusion steps.
        steps = request.get("steps", 20)
        if not isinstance(steps, int) or steps < 1 or steps > 100:
            errors.append("Steps must be an integer between 1 and 100")
        # Classifier-free guidance scale.
        guidance_scale = request.get("guidance_scale", 7.5)
        if not isinstance(guidance_scale, (int, float)) or guidance_scale < 1.0 or guidance_scale > 20.0:
            errors.append("Guidance scale must be between 1.0 and 20.0")
        # img2img needs an init image and a valid strength.
        if request.get("task") == "img2img":
            if "init_image" not in request:
                errors.append("'init_image' is required for img2img task")
            strength = request.get("strength", 0.8)
            if not isinstance(strength, (int, float)) or strength < 0.0 or strength > 1.0:
                errors.append("Strength must be between 0.0 and 1.0")
        return errors

    def get_hardware_requirements(self) -> Dict[str, Any]:
        """Declare hardware requirements (CUDA GPU required)."""
        return {
            "gpu": "required",
            "vram_gb": 6,
            "ram_gb": 8,
            "cuda": "required"
        }

    async def execute(self, request: Dict[str, Any]) -> PluginResult:
        """Run one generation job and return base64 PNG images."""
        start_time = time.time()
        try:
            errors = self.validate_request(request)
            if errors:
                return PluginResult(
                    success=False,
                    error=f"Validation failed: {'; '.join(errors)}"
                )
            # Gather parameters with their documented defaults.
            prompt = request["prompt"]
            negative_prompt = request.get("negative_prompt", "")
            model_name = request.get("model", "runwayml/stable-diffusion-v1-5")
            width = request.get("width", 512)
            height = request.get("height", 512)
            steps = request.get("steps", 20)
            guidance_scale = request.get("guidance_scale", 7.5)
            num_images = request.get("num_images", 1)
            seed = request.get("seed")
            task = request.get("task", "txt2img")
            # Load the pipeline matching the task: img2img needs the
            # img2img pipeline class, not the txt2img one.
            pipe = await self._load_model(model_name, task)
            loop = asyncio.get_event_loop()
            if task == "img2img":
                init_image_data = request["init_image"]
                init_image = self._decode_image(init_image_data)
                strength = request.get("strength", 0.8)
                images = await loop.run_in_executor(
                    None,
                    lambda: pipe(
                        prompt=prompt,
                        negative_prompt=negative_prompt,
                        image=init_image,
                        strength=strength,
                        num_inference_steps=steps,
                        guidance_scale=guidance_scale,
                        num_images_per_prompt=num_images,
                        generator=self._get_generator(seed)
                    ).images
                )
            else:
                images = await loop.run_in_executor(
                    None,
                    lambda: pipe(
                        prompt=prompt,
                        negative_prompt=negative_prompt,
                        width=width,
                        height=height,
                        num_inference_steps=steps,
                        guidance_scale=guidance_scale,
                        num_images_per_prompt=num_images,
                        generator=self._get_generator(seed)
                    ).images
                )
            # Encode each PIL image to a base64 PNG string.
            encoded_images = []
            for img in images:
                buffer = io.BytesIO()
                img.save(buffer, format="PNG")
                encoded_images.append(base64.b64encode(buffer.getvalue()).decode())
            execution_time = time.time() - start_time
            return PluginResult(
                success=True,
                data={
                    "images": encoded_images,
                    "count": len(images),
                    "parameters": {
                        "prompt": prompt,
                        "width": width,
                        "height": height,
                        "steps": steps,
                        "guidance_scale": guidance_scale,
                        "seed": seed
                    }
                },
                metrics={
                    "model": model_name,
                    "task": task,
                    "images_generated": len(images),
                    "generation_time": execution_time,
                    # Guard against a zero-image result (e.g. all filtered).
                    "time_per_image": execution_time / len(images) if images else 0.0
                },
                execution_time=execution_time
            )
        except Exception as e:
            return PluginResult(
                success=False,
                error=str(e),
                execution_time=time.time() - start_time
            )

    async def _load_model(self, model_name: str, task: str = "txt2img"):
        """Load a Stable Diffusion pipeline, cached per (model, task).

        ``task`` selects the pipeline class ("img2img" loads the
        img2img pipeline; anything else loads txt2img), keeping the
        original single-argument calls backward-compatible.
        """
        cache_key = (model_name, task)
        if cache_key not in self._model_cache:
            loop = asyncio.get_event_loop()
            device = "cuda" if self.torch.cuda.is_available() else "cpu"
            loader = self.img2img_pipe if task == "img2img" else self.diffusers
            # Half precision on GPU; safety checker disabled as before.
            pipe = await loop.run_in_executor(
                None,
                lambda: loader.from_pretrained(
                    model_name,
                    torch_dtype=self.torch.float16 if device == "cuda" else self.torch.float32,
                    safety_checker=None,
                    requires_safety_checker=False
                )
            )
            pipe = pipe.to(device)
            # Enable memory optimizations on GPU.
            if device == "cuda":
                pipe.enable_attention_slicing()
                if self.vram_gb < 8:
                    pipe.enable_model_cpu_offload()
            self._model_cache[cache_key] = pipe
        return self._model_cache[cache_key]

    def _decode_image(self, image_data: str) -> 'Image':
        """Decode a base64 (optionally data-URL-prefixed) image string."""
        if image_data.startswith('data:image'):
            # Strip the "data:image/...;base64," prefix.
            image_data = image_data.split(',')[1]
        image_bytes = base64.b64decode(image_data)
        return self.Image.open(io.BytesIO(image_bytes))

    def _get_generator(self, seed):
        """Return a seeded torch.Generator for reproducible results.

        ``seed`` is an int or None; the annotation is omitted because
        ``Optional`` is not imported in this module. Returns None (i.e.
        nondeterministic generation) when no seed is given.
        """
        if seed is not None:
            # NOTE(review): a default (CPU) Generator is used even when the
            # pipeline runs on CUDA — confirm this is the intended behavior.
            return self.torch.Generator().manual_seed(seed)
        return None

    async def health_check(self) -> bool:
        """Report whether the default pipeline can be loaded."""
        try:
            pipe = await self._load_model("runwayml/stable-diffusion-v1-5")
            return pipe is not None
        except Exception:
            return False

    def cleanup(self) -> None:
        """Move cached pipelines off the GPU and free CUDA memory."""
        for pipe in self._model_cache.values():
            if hasattr(pipe, 'to'):
                pipe.to("cpu")
        self._model_cache.clear()
        if self.torch.cuda.is_available():
            self.torch.cuda.empty_cache()

View File

@@ -1,215 +0,0 @@
"""
Whisper speech recognition plugin
"""
import asyncio
import os
import tempfile
from typing import Dict, Any, List
import time
from .base import GPUPlugin, PluginResult
from .exceptions import PluginExecutionError
class WhisperPlugin(GPUPlugin):
    """Plugin for Whisper speech recognition.

    Wraps OpenAI Whisper behind the GPUPlugin interface: validates
    requests, lazily loads and caches models by name, fetches audio
    from a URL or a local path, and runs transcription/translation in
    a thread executor so the event loop stays responsive.

    NOTE(review): assumes GPUPlugin defines setup()/execute() hooks as
    used here — confirm against the base class.
    """
    def __init__(self):
        super().__init__()
        # Metadata advertised to the service discovery layer.
        self.service_id = "whisper"
        self.name = "Whisper Speech Recognition"
        self.version = "1.0.0"
        self.description = "Transcribe and translate audio files using OpenAI Whisper"
        self.capabilities = ["transcribe", "translate"]
        # model name -> loaded whisper model (never evicted; see cleanup()).
        self._model_cache = {}
    def setup(self) -> None:
        """Initialize Whisper dependencies.

        Raises PluginExecutionError when the `openai-whisper` package or
        the `ffmpeg` binary (required by whisper for decoding) is missing.
        """
        super().setup()
        # Check for whisper installation
        try:
            import whisper
            self.whisper = whisper
        except ImportError:
            raise PluginExecutionError("Whisper not installed. Install with: pip install openai-whisper")
        # Check for ffmpeg
        import subprocess
        try:
            subprocess.run(["ffmpeg", "-version"], capture_output=True, check=True)
        except (subprocess.CalledProcessError, FileNotFoundError):
            raise PluginExecutionError("FFmpeg not found. Install FFmpeg for audio processing")
    def validate_request(self, request: Dict[str, Any]) -> List[str]:
        """Validate Whisper request parameters.

        Returns a list of human-readable error strings; empty means valid.
        """
        errors = []
        # Check required parameters
        if "audio_url" not in request and "audio_file" not in request:
            errors.append("Either 'audio_url' or 'audio_file' must be provided")
        # Validate model
        model = request.get("model", "base")
        valid_models = ["tiny", "base", "small", "medium", "large", "large-v2", "large-v3"]
        if model not in valid_models:
            errors.append(f"Invalid model. Must be one of: {', '.join(valid_models)}")
        # Validate task
        task = request.get("task", "transcribe")
        if task not in ["transcribe", "translate"]:
            errors.append("Task must be 'transcribe' or 'translate'")
        # Validate language
        if "language" in request:
            language = request["language"]
            if not isinstance(language, str) or len(language) != 2:
                errors.append("Language must be a 2-letter language code (e.g., 'en', 'es')")
        return errors
    def get_hardware_requirements(self) -> Dict[str, Any]:
        """Get hardware requirements for Whisper (GPU is optional)."""
        return {
            "gpu": "recommended",
            "vram_gb": 2,
            "ram_gb": 4,
            "storage_gb": 1
        }
    async def execute(self, request: Dict[str, Any]) -> PluginResult:
        """Execute Whisper transcription.

        Validates the request, resolves the audio source, transcribes (or
        translates) off the event loop, and returns a PluginResult. All
        failures are converted into a failed PluginResult rather than
        raised.
        """
        start_time = time.time()
        try:
            # Validate request
            errors = self.validate_request(request)
            if errors:
                return PluginResult(
                    success=False,
                    error=f"Validation failed: {'; '.join(errors)}"
                )
            # Get parameters
            model_name = request.get("model", "base")
            task = request.get("task", "transcribe")
            language = request.get("language")
            temperature = request.get("temperature", 0.0)
            # Load or get cached model
            model = await self._load_model(model_name)
            # Get audio file (may download to a temp file)
            audio_path = await self._get_audio_file(request)
            # Transcribe in a thread executor: model.transcribe is blocking.
            loop = asyncio.get_event_loop()
            if task == "translate":
                result = await loop.run_in_executor(
                    None,
                    lambda: model.transcribe(
                        audio_path,
                        task="translate",
                        temperature=temperature
                    )
                )
            else:
                result = await loop.run_in_executor(
                    None,
                    lambda: model.transcribe(
                        audio_path,
                        language=language,
                        temperature=temperature
                    )
                )
            # Clean up: only delete files we downloaded, never caller-owned ones.
            if audio_path != request.get("audio_file"):
                os.unlink(audio_path)
            execution_time = time.time() - start_time
            return PluginResult(
                success=True,
                data={
                    "text": result["text"],
                    "language": result.get("language"),
                    "segments": result.get("segments", [])
                },
                metrics={
                    "model": model_name,
                    "task": task,
                    "audio_duration": result.get("duration"),
                    "processing_time": execution_time,
                    "real_time_factor": result.get("duration", 0) / execution_time if execution_time > 0 else 0
                },
                execution_time=execution_time
            )
        except Exception as e:
            return PluginResult(
                success=False,
                error=str(e),
                execution_time=time.time() - start_time
            )
    async def _load_model(self, model_name: str):
        """Load Whisper model with caching.

        NOTE(review): no lock around the cache — two concurrent requests
        for the same model may both load it; confirm the plugin host
        serializes execute() calls.
        """
        if model_name not in self._model_cache:
            loop = asyncio.get_event_loop()
            model = await loop.run_in_executor(
                None,
                lambda: self.whisper.load_model(model_name)
            )
            self._model_cache[model_name] = model
        return self._model_cache[model_name]
    async def _get_audio_file(self, request: Dict[str, Any]) -> str:
        """Get audio file from URL or direct file path.

        Returns a local path. When downloading, the caller (execute)
        is responsible for deleting the temporary file afterwards.
        NOTE(review): requests.get here blocks the event loop during
        download — consider run_in_executor.
        """
        if "audio_file" in request:
            return request["audio_file"]
        # Download from URL
        audio_url = request["audio_url"]
        # Use requests to download
        import requests
        response = requests.get(audio_url, stream=True)
        response.raise_for_status()
        # Save to temporary file
        suffix = self._get_audio_suffix(audio_url)
        with tempfile.NamedTemporaryFile(delete=False, suffix=suffix) as f:
            for chunk in response.iter_content(chunk_size=8192):
                f.write(chunk)
            return f.name
    def _get_audio_suffix(self, url: str) -> str:
        """Get file extension from URL.

        NOTE(review): URLs with query strings ('...mp3?token=x') fall
        through to the '.mp3' default — acceptable for whisper/ffmpeg
        which sniff content, but confirm.
        """
        if url.endswith('.mp3'):
            return '.mp3'
        elif url.endswith('.wav'):
            return '.wav'
        elif url.endswith('.m4a'):
            return '.m4a'
        elif url.endswith('.flac'):
            return '.flac'
        else:
            return '.mp3'  # Default
    async def health_check(self) -> bool:
        """Check Whisper health by loading the smallest model."""
        try:
            # Check if we can load the tiny model
            await self._load_model("tiny")
            return True
        except Exception:
            return False
    def cleanup(self) -> None:
        """Cleanup resources (drops all cached models)."""
        self._model_cache.clear()

View File

@@ -1,30 +0,0 @@
[tool.poetry]
name = "aitbc-miner-node"
version = "0.1.0"
description = "AITBC miner node daemon"
authors = ["AITBC Team"]
packages = [
{ include = "aitbc_miner", from = "src" }
]
[tool.poetry.dependencies]
python = "^3.11"
httpx = "^0.27.0"
pydantic = "^2.7.0"
pyyaml = "^6.0.1"
psutil = "^5.9.8"
aiosignal = "^1.3.1"
uvloop = { version = "^0.19.0", optional = true }
asyncio = { version = "^3.4.3", optional = true } # NOTE(review): PyPI 'asyncio' is an obsolete stdlib backport — remove this dependency (also not exposed under [tool.poetry.extras])
rich = "^13.7.1"
[tool.poetry.extras]
uvloop = ["uvloop"]
[tool.poetry.group.dev.dependencies]
pytest = "^8.2.0"
pytest-asyncio = "^0.23.0"
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

View File

@@ -1 +0,0 @@
"""AITBC miner node package."""

View File

@@ -1 +0,0 @@
"""Control loop and background tasks for the miner node."""

View File

@@ -1,127 +0,0 @@
from __future__ import annotations
import asyncio
import json
from collections.abc import Callable
from typing import Optional
from ..config import settings
from ..logging import get_logger
from ..coordinator import CoordinatorClient
from ..util.probe import collect_capabilities, collect_runtime_metrics
from ..util.backoff import compute_backoff
from ..util.fs import ensure_workspace, write_json
from ..runners import get_runner
logger = get_logger(__name__)
class MinerControlLoop:
    """Background control loop for a miner node.

    Registers the node with the coordinator, then runs two concurrent
    tasks: a fixed-cadence heartbeat loop and a job poll loop that
    backs off exponentially while idle.
    """

    def __init__(self) -> None:
        self._tasks: list[asyncio.Task[None]] = []
        self._stop_event = asyncio.Event()
        self._coordinator = CoordinatorClient()
        # Capabilities are probed once at construction; a restart is
        # required to pick up hardware changes.
        self._capabilities_snapshot = collect_capabilities(settings.max_concurrent_cpu, settings.max_concurrent_gpu)

    async def start(self) -> None:
        """Register with the coordinator, then spawn the background tasks.

        Raises if registration fails, so the loops never run for an
        unregistered node.
        """
        logger.info("Starting miner control loop", extra={"node_id": settings.node_id})
        await self._register()
        self._tasks.append(asyncio.create_task(self._heartbeat_loop()))
        self._tasks.append(asyncio.create_task(self._poll_loop()))

    async def stop(self) -> None:
        """Cancel background tasks and release the coordinator client."""
        logger.info("Stopping miner control loop")
        self._stop_event.set()
        for task in self._tasks:
            task.cancel()
        await asyncio.gather(*self._tasks, return_exceptions=True)
        await self._coordinator.aclose()

    async def _register(self) -> None:
        """Announce node capabilities to the coordinator; re-raises on failure."""
        payload = {
            "capabilities": self._capabilities_snapshot.capabilities,
            "concurrency": self._capabilities_snapshot.concurrency,
            "region": settings.region,
        }
        try:
            resp = await self._coordinator.register(payload)
            logger.info("Registered miner", extra={"resp": resp})
        except Exception as exc:
            logger.exception("Failed to register miner", exc_info=exc)
            raise

    async def _heartbeat_loop(self) -> None:
        """Send liveness and runtime metrics at a fixed interval."""
        interval = settings.heartbeat_interval_seconds
        while not self._stop_event.is_set():
            payload = {
                "inflight": 0,  # TODO: report the real in-flight job count
                "status": "ONLINE",
                "metadata": collect_runtime_metrics(),
            }
            try:
                await self._coordinator.heartbeat(payload)
                logger.debug("heartbeat sent")
            except Exception as exc:
                # Heartbeat failures are transient; keep the loop alive.
                logger.warning("heartbeat failed", exc_info=exc)
            await asyncio.sleep(interval)

    async def _poll_loop(self) -> None:
        """Poll for work, backing off exponentially while idle.

        Bug fix: the poll interval is reset to the base cadence once a
        job is received. Previously only a dead attribute
        (`_current_backoff`) was reset, so a backed-off interval
        persisted even after work resumed.
        """
        interval = settings.poll_interval_seconds
        while not self._stop_event.is_set():
            payload = {"max_wait_seconds": interval}
            try:
                job = await self._coordinator.poll(payload)
                if job:
                    logger.info("received job", extra={"job_id": job.get("job_id")})
                    interval = settings.poll_interval_seconds
                    await self._handle_job(job)
                else:
                    interval = min(compute_backoff(interval, 2.0, settings.heartbeat_jitter_pct, settings.max_backoff_seconds), settings.max_backoff_seconds)
                    logger.debug("no job; next poll interval=%s", interval)
            except Exception as exc:
                logger.warning("poll failed", exc_info=exc)
                interval = min(compute_backoff(interval, 2.0, settings.heartbeat_jitter_pct, settings.max_backoff_seconds), settings.max_backoff_seconds)
            await asyncio.sleep(interval)

    async def _handle_job(self, job: dict) -> None:
        """Run one job through its runner and report success or failure."""
        job_id = job.get("job_id", "unknown")
        workspace = ensure_workspace(settings.workspace_root, job_id)
        runner_kind = job.get("runner", {}).get("kind", "noop")
        runner = get_runner(runner_kind)
        try:
            result = await runner.run(job, workspace)
        except Exception as exc:
            logger.exception("runner crashed", extra={"job_id": job_id, "runner": runner_kind})
            await self._coordinator.submit_failure(
                job_id,
                {
                    "error_code": "RUNTIME_ERROR",
                    "error_message": str(exc),
                    "metrics": {},
                },
            )
            return
        if result.ok:
            write_json(workspace / "result.json", result.output)
            try:
                await self._coordinator.submit_result(
                    job_id,
                    {
                        "result": result.output,
                        "metrics": {"workspace": str(workspace)},
                    },
                )
            except Exception as exc:
                logger.warning("failed to submit result", extra={"job_id": job_id}, exc_info=exc)
        else:
            await self._coordinator.submit_failure(
                job_id,
                {
                    "error_code": result.output.get("error_code", "FAILED"),
                    "error_message": result.output.get("error_message", "Job failed"),
                    "metrics": result.output.get("metrics", {}),
                },
            )

View File

@@ -1,40 +0,0 @@
from __future__ import annotations
from pathlib import Path
from typing import Optional
from pydantic import BaseModel, Field
from pydantic_settings import BaseSettings, SettingsConfigDict
class MinerSettings(BaseSettings):
    """Runtime configuration for the miner node.

    Values may be overridden via environment variables or a local
    `.env` file (pydantic-settings, case-insensitive keys).
    """
    model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8", case_sensitive=False)
    # Identity and coordinator endpoint
    node_id: str = "node-dev-1"
    coordinator_base_url: str = "http://127.0.0.1:8011/v1"
    auth_token: str = "REDACTED_MINER_KEY"  # bearer token sent on every coordinator call
    region: Optional[str] = None  # optional placement hint forwarded at registration
    # Filesystem layout
    workspace_root: Path = Field(default=Path("/var/lib/aitbc/miner/jobs"))
    cache_root: Path = Field(default=Path("/var/lib/aitbc/miner/cache"))
    # Heartbeat / polling cadence (seconds unless noted)
    heartbeat_interval_seconds: int = 15
    heartbeat_jitter_pct: int = 10  # +/- percent applied to backoff delays
    heartbeat_timeout_seconds: int = 60
    poll_interval_seconds: int = 3
    max_backoff_seconds: int = 60
    # Concurrency caps per resource class
    max_concurrent_cpu: int = 1
    max_concurrent_gpu: int = 1
    # Runner toggles
    enable_cli_runner: bool = True
    enable_python_runner: bool = True
    allowlist_dir: Path = Field(default=Path("/etc/aitbc/miner/allowlist.d"))
    # Logging
    log_level: str = "INFO"
    log_path: Optional[Path] = None
# Process-wide settings singleton, resolved at import time.
settings = MinerSettings()

View File

@@ -1,76 +0,0 @@
from __future__ import annotations
import asyncio
from typing import Any, Dict, Optional
import httpx
from .config import MinerSettings, settings
from .logging import get_logger
logger = get_logger(__name__)
class CoordinatorClient:
    """Async HTTP client for interacting with the coordinator API."""

    def __init__(self, cfg: MinerSettings | None = None) -> None:
        self.cfg = cfg or settings
        self._client: Optional[httpx.AsyncClient] = None

    @property
    def client(self) -> httpx.AsyncClient:
        """Lazily construct the underlying httpx client on first access."""
        if self._client is not None:
            return self._client
        auth_headers = {
            "Authorization": f"Bearer {self.cfg.auth_token}",
            "User-Agent": f"aitbc-miner/{self.cfg.node_id}",
        }
        limits = httpx.Timeout(connect=5.0, read=30.0, write=10.0, pool=None)
        self._client = httpx.AsyncClient(
            base_url=self.cfg.coordinator_base_url.rstrip("/"),
            headers=auth_headers,
            timeout=limits,
        )
        return self._client

    async def aclose(self) -> None:
        """Dispose of the httpx client, if one was ever created."""
        if self._client:
            await self._client.aclose()
            self._client = None

    async def _post_json(self, path: str, payload: Dict[str, Any]) -> Dict[str, Any]:
        # Shared POST-and-decode helper; raises httpx.HTTPStatusError on non-2xx.
        resp = await self.client.post(path, json=payload)
        resp.raise_for_status()
        return resp.json()

    async def register(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Announce node capabilities to the coordinator."""
        logger.debug("registering miner", extra={"payload": payload})
        return await self._post_json("/miners/register", payload)

    async def heartbeat(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Send a liveness report."""
        return await self._post_json("/miners/heartbeat", payload)

    async def poll(self, payload: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Ask for work; returns None when the coordinator replies 204."""
        resp = await self.client.post("/miners/poll", json=payload)
        if resp.status_code == 204:
            logger.debug("no job available")
            return None
        resp.raise_for_status()
        return resp.json()

    async def submit_result(self, job_id: str, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Report a successful job outcome."""
        return await self._post_json(f"/miners/{job_id}/result", payload)

    async def submit_failure(self, job_id: str, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Report a failed job outcome."""
        return await self._post_json(f"/miners/{job_id}/fail", payload)

    async def __aenter__(self) -> "CoordinatorClient":
        _ = self.client  # force eager construction inside the context
        return self

    async def __aexit__(self, exc_type, exc, tb) -> None:
        await self.aclose()
async def backoff(base: float, max_seconds: float) -> float:
    """Sleep for *base* seconds and return the next (doubled, capped) delay."""
    await asyncio.sleep(base)
    doubled = base * 2
    return doubled if doubled < max_seconds else max_seconds

View File

@@ -1,25 +0,0 @@
from __future__ import annotations
import logging
from typing import Optional
from .config import settings
def configure_logging(level: Optional[str] = None, log_path: Optional[str] = None) -> None:
    """Install root logging handlers: stderr, plus an optional file sink.

    Falls back to the configured default level when *level* is omitted,
    and to INFO when the name is unrecognised.
    """
    resolved_level = getattr(logging, (level or settings.log_level).upper(), logging.INFO)
    sinks: list[logging.Handler] = [logging.StreamHandler()]
    if log_path:
        sinks.append(logging.FileHandler(log_path))
    logging.basicConfig(
        level=resolved_level,
        format="%(asctime)s %(levelname)s %(name)s :: %(message)s",
        handlers=sinks,
    )
def get_logger(name: str) -> logging.Logger:
    """Return a named logger, bootstrapping root handlers on first use."""
    if not logging.getLogger().handlers:
        # Lazily apply the module defaults so that importing this module
        # has no side effects until a logger is actually requested.
        file_path = settings.log_path.as_posix() if settings.log_path else None
        configure_logging(settings.log_level, file_path)
    return logging.getLogger(name)

View File

@@ -1,51 +0,0 @@
from __future__ import annotations
import asyncio
import signal
from contextlib import asynccontextmanager
from typing import AsyncIterator
from .config import settings
from .logging import get_logger
logger = get_logger(__name__)
class MinerApplication:
    """Minimal daemon shell: `start()` blocks until `stop()` is called."""

    def __init__(self) -> None:
        self._shutdown = asyncio.Event()

    async def start(self) -> None:
        """Run until a shutdown is requested."""
        logger.info("Miner node starting", extra={"node_id": settings.node_id})
        # TODO: initialize capability probe, register with coordinator, start heartbeat and poll loops
        await self._shutdown.wait()

    async def stop(self) -> None:
        """Request shutdown; unblocks `start()`."""
        logger.info("Miner node shutting down")
        self._shutdown.set()
@asynccontextmanager
async def miner_app() -> AsyncIterator[MinerApplication]:
    """Yield a MinerApplication, guaranteeing `stop()` runs on exit."""
    application = MinerApplication()
    try:
        yield application
    finally:
        await application.stop()
def run() -> None:
    """Entry point: run the miner application on a fresh event loop.

    Installs SIGINT/SIGTERM handlers that request a graceful stop.
    Fix: the event loop is now closed on exit — the original leaked it.
    """
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    async def _run() -> None:
        async with miner_app() as app:
            # Signal handlers schedule the stop coroutine on this loop.
            loop.add_signal_handler(signal.SIGINT, lambda: asyncio.create_task(app.stop()))
            loop.add_signal_handler(signal.SIGTERM, lambda: asyncio.create_task(app.stop()))
            await app.start()

    try:
        loop.run_until_complete(_run())
    finally:
        loop.close()


if __name__ == "__main__":  # pragma: no cover
    run()

View File

@@ -1,20 +0,0 @@
from __future__ import annotations
from typing import Dict
from .base import BaseRunner
from .cli.simple import CLIRunner
from .python.noop import PythonNoopRunner
from .service import ServiceRunner
# Singleton runner instances keyed by a job's `runner.kind` field.
# "python" and "noop" both map to the no-op implementation (distinct
# instances of the same stateless class).
_RUNNERS: Dict[str, BaseRunner] = {
    "cli": CLIRunner(),
    "python": PythonNoopRunner(),
    "noop": PythonNoopRunner(),
    "service": ServiceRunner(),
}
def get_runner(kind: str) -> BaseRunner:
    """Return the runner registered for *kind*, defaulting to the no-op runner."""
    return _RUNNERS.get(kind, _RUNNERS["noop"])

View File

@@ -1,17 +0,0 @@
from __future__ import annotations
from dataclasses import dataclass
from pathlib import Path
from typing import Any, Dict
@dataclass
class RunnerResult:
    """Outcome of one runner execution.

    When `ok` is True, `output` carries the result payload; otherwise it
    carries error_code/error_message/metrics fields consumed by the
    control loop's failure reporting.
    """
    ok: bool
    output: Dict[str, Any]
    # Optional named artifact files produced in the job workspace.
    artifacts: Dict[str, Path] | None = None
class BaseRunner:
    """Abstract job runner; subclasses execute one job inside a workspace."""
    async def run(self, job: Dict[str, Any], workspace: Path) -> RunnerResult:
        """Execute *job*, using *workspace* as the scratch/artifact directory."""
        raise NotImplementedError

View File

@@ -1,62 +0,0 @@
from __future__ import annotations
import asyncio
from pathlib import Path
from typing import Any, Dict, List
from ..base import BaseRunner, RunnerResult
class CLIRunner(BaseRunner):
    """Runs a job's `runner.command` as a subprocess inside the workspace."""

    async def run(self, job: Dict[str, Any], workspace: Path) -> RunnerResult:
        argv: List[str] = job.get("runner", {}).get("command", [])
        if not argv:
            # A missing command means the job spec is malformed; fail fast.
            return RunnerResult(
                ok=False,
                output={
                    "error_code": "INVALID_COMMAND",
                    "error_message": "runner.command is required for CLI jobs",
                    "metrics": {},
                },
            )
        out_log = workspace / "stdout.log"
        err_log = workspace / "stderr.log"
        proc = await asyncio.create_subprocess_exec(
            *argv,
            cwd=str(workspace),
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )
        captured_out, captured_err = await proc.communicate()
        # Persist both streams so they can be shipped back as artifacts.
        out_log.write_bytes(captured_out)
        err_log.write_bytes(captured_err)
        if proc.returncode != 0:
            return RunnerResult(
                ok=False,
                output={
                    "error_code": "PROCESS_FAILED",
                    "error_message": f"command exited with code {proc.returncode}",
                    "metrics": {
                        "exit_code": proc.returncode,
                        "stderr": err_log.name,
                    },
                },
            )
        return RunnerResult(
            ok=True,
            output={
                "exit_code": 0,
                "stdout": out_log.name,
                "stderr": err_log.name,
            },
            artifacts={
                "stdout": out_log,
                "stderr": err_log,
            },
        )

View File

@@ -1,20 +0,0 @@
from __future__ import annotations
import asyncio
from pathlib import Path
from typing import Any, Dict
from ..base import BaseRunner, RunnerResult
class PythonNoopRunner(BaseRunner):
    """Echoes the job payload back without performing any work."""

    async def run(self, job: Dict[str, Any], workspace: Path) -> RunnerResult:
        # Yield control once so this behaves like a genuine coroutine.
        await asyncio.sleep(0)
        return RunnerResult(
            ok=True,
            output={
                "echo": job.get("payload", {}),
                "message": "python noop runner executed",
            },
        )

View File

@@ -1,118 +0,0 @@
"""
Service runner for executing GPU service jobs via plugins
"""
import asyncio
import json
import sys
from pathlib import Path
from typing import Dict, Any, Optional
from .base import BaseRunner
from ...config import settings
from ...logging import get_logger
# Make the repo-level "plugins" directory importable. Four `.parent` hops
# from this file — assumes the module sits at that exact depth in the
# package tree; TODO confirm against the actual layout.
plugins_path = Path(__file__).parent.parent.parent.parent / "plugins"
sys.path.insert(0, str(plugins_path))
try:
    from plugins.discovery import ServiceDiscovery
except ImportError:
    # The plugin system is optional; ServiceRunner.initialize() raises a
    # clear error if a service job arrives while it is unavailable.
    ServiceDiscovery = None
logger = get_logger(__name__)
class ServiceRunner(BaseRunner):
    """Runner for GPU service jobs using the plugin system.

    Fix: `run()` now returns a RunnerResult like every other BaseRunner
    implementation. It previously returned a plain dict, which crashed
    the control loop (it reads `result.ok` / `result.output`).
    """

    def __init__(self):
        super().__init__()
        self.discovery: Optional[ServiceDiscovery] = None
        self._initialized = False

    async def initialize(self) -> None:
        """Start service discovery once; subsequent calls are no-ops.

        Raises ImportError when the optional plugin system is missing.
        """
        if self._initialized:
            return
        if ServiceDiscovery is None:
            raise ImportError("ServiceDiscovery not available. Check plugin installation.")
        # Fall back to development defaults when settings lack these fields.
        pool_hub_url = getattr(settings, 'pool_hub_url', 'http://localhost:8001')
        miner_id = getattr(settings, 'node_id', 'miner-1')
        self.discovery = ServiceDiscovery(pool_hub_url, miner_id)
        await self.discovery.start()
        self._initialized = True
        logger.info("Service runner initialized")

    async def run(self, job: Dict[str, Any], workspace: Path) -> "RunnerResult":
        """Execute a service job via the plugin system.

        The raw plugin result is persisted to workspace/result.json for
        debugging; the returned RunnerResult follows the BaseRunner
        contract consumed by the control loop.
        """
        from .base import RunnerResult  # local import: keeps module imports untouched

        job_id = job.get("job_id", "unknown")
        try:
            await self.initialize()
            # Extract service type and parameters
            service_type = job.get("service_type")
            if not service_type:
                raise ValueError("Job missing service_type")
            service_params = job.get("parameters", {})
            logger.info("Executing service job", extra={
                "job_id": job_id,
                "service_type": service_type
            })
            # Execute via plugin system
            result = await self.discovery.execute_service(service_type, service_params)
            # Save the raw plugin result to the workspace
            result_file = workspace / "result.json"
            with open(result_file, "w") as f:
                json.dump(result, f, indent=2)
            if result["success"]:
                logger.info("Service job completed successfully", extra={
                    "job_id": job_id,
                    "execution_time": result.get("execution_time")
                })
                return RunnerResult(
                    ok=True,
                    output={
                        "result": result["data"],
                        "metrics": result.get("metrics", {}),
                        "execution_time": result.get("execution_time"),
                    },
                )
            logger.error("Service job failed", extra={
                "job_id": job_id,
                "error": result.get("error")
            })
            return RunnerResult(
                ok=False,
                output={
                    "error_code": "SERVICE_FAILED",
                    "error_message": result.get("error", "Unknown error"),
                    "metrics": result.get("metrics", {}),
                },
            )
        except Exception as e:
            logger.exception("Service runner failed", extra={"job_id": job_id})
            return RunnerResult(
                ok=False,
                output={
                    "error_code": "SERVICE_ERROR",
                    "error_message": str(e),
                    "metrics": {},
                },
            )

    async def cleanup(self) -> None:
        """Stop service discovery and allow re-initialization."""
        if self.discovery:
            await self.discovery.stop()
        self._initialized = False

View File

@@ -1,19 +0,0 @@
from __future__ import annotations
import asyncio
import random
def compute_backoff(base: float, factor: float, jitter_pct: float, max_seconds: float) -> float:
    """Return the next backoff delay.

    The delay is base*factor capped at max_seconds, randomised by
    +/- jitter_pct percent, and clamped to be non-negative.
    """
    capped = min(base * factor, max_seconds)
    spread = capped * jitter_pct / 100.0
    jittered = random.uniform(capped - spread, capped + spread)
    return jittered if jittered > 0.0 else 0.0
def next_backoff(current: float, factor: float, jitter_pct: float, max_seconds: float) -> float:
    """Alias for compute_backoff, treating *current* as the base delay."""
    return compute_backoff(current, factor, jitter_pct, max_seconds)
async def sleep_with_backoff(delay: float, factor: float, jitter_pct: float, max_seconds: float) -> float:
    """Sleep for *delay* seconds, then return the next jittered backoff delay."""
    await asyncio.sleep(delay)
    return next_backoff(delay, factor, jitter_pct, max_seconds)

View File

@@ -1,15 +0,0 @@
from __future__ import annotations
from pathlib import Path
def ensure_workspace(root: Path, job_id: str) -> Path:
    """Create (if needed) and return the per-job workspace directory."""
    workspace = root.joinpath(job_id)
    workspace.mkdir(parents=True, exist_ok=True)
    return workspace
def write_json(path: Path, data: dict) -> None:
    """Serialise *data* as pretty-printed JSON to *path* (UTF-8)."""
    import json

    serialised = json.dumps(data, indent=2)
    path.write_text(serialised, encoding="utf-8")

View File

@@ -1,91 +0,0 @@
from __future__ import annotations
import platform
import shutil
import subprocess
import time
from dataclasses import dataclass
from typing import Any, Dict, List
import psutil
@dataclass
class CapabilitySnapshot:
    """Hardware/software capabilities advertised to the coordinator."""
    capabilities: Dict[str, Any]  # free-form capability document (cpu, memory, gpus, runners)
    concurrency: int  # maximum number of jobs this node accepts at once
    region: str | None = None  # optional placement hint
def collect_capabilities(max_cpu_concurrency: int, max_gpu_concurrency: int) -> CapabilitySnapshot:
    """Probe this host and build the capability snapshot sent at registration."""
    logical_cores = psutil.cpu_count(logical=True) or 1
    mem_total = psutil.virtual_memory().total
    gpus = _detect_gpus()
    caps: Dict[str, Any] = {
        "node": platform.node(),
        "python_version": platform.python_version(),
        "platform": platform.platform(),
        "cpu": {
            "logical_cores": logical_cores,
            "model": platform.processor(),
        },
        "memory": {
            "total_bytes": mem_total,
            "total_gb": round(mem_total / (1024**3), 2),
        },
        "runners": {
            "cli": True,
            "python": True,
        },
    }
    if gpus:
        caps["gpus"] = gpus
    # GPU concurrency only counts when a GPU was actually detected.
    effective_gpu = max_gpu_concurrency if gpus else 0
    concurrency = max(1, max_cpu_concurrency, effective_gpu)
    return CapabilitySnapshot(capabilities=caps, concurrency=concurrency)
def collect_runtime_metrics() -> Dict[str, Any]:
    """Sample lightweight host metrics for heartbeat payloads."""
    mem = psutil.virtual_memory()
    # getloadavg is unavailable on some platforms (e.g. Windows).
    if hasattr(psutil, "getloadavg"):
        load = psutil.getloadavg()
    else:
        load = (0, 0, 0)
    return {
        "cpu_percent": psutil.cpu_percent(interval=None),
        "load_avg": load,
        "memory_percent": mem.percent,
        "timestamp": time.time(),
    }
def _detect_gpus() -> List[Dict[str, Any]]:
    """Enumerate NVIDIA GPUs via nvidia-smi; returns [] when unavailable."""
    smi = shutil.which("nvidia-smi")
    if smi is None:
        return []
    query = [
        smi,
        "--query-gpu=name,memory.total",
        "--format=csv,noheader"
    ]
    try:
        raw = subprocess.check_output(query, stderr=subprocess.DEVNULL, text=True)
    except (subprocess.CalledProcessError, FileNotFoundError):
        return []
    detected: List[Dict[str, Any]] = []
    for row in raw.strip().splitlines():
        fields = [field.strip() for field in row.split(",")]
        if not fields:
            continue
        memory_mb = None
        # nvidia-smi reports e.g. "16384 MiB"; tolerate anything else.
        if len(fields) > 1 and fields[1].lower().endswith(" mib"):
            try:
                memory_mb = int(float(fields[1].split()[0]))
            except ValueError:
                memory_mb = None
        detected.append({"name": fields[0], "memory_mb": memory_mb})
    return detected

View File

@@ -1,37 +0,0 @@
import asyncio
from pathlib import Path
import pytest
from aitbc_miner.runners.cli.simple import CLIRunner
from aitbc_miner.runners.python.noop import PythonNoopRunner
@pytest.mark.asyncio
async def test_python_noop_runner(tmp_path: Path):
    """Noop runner must succeed and echo the job payload unchanged."""
    runner = PythonNoopRunner()
    job = {"payload": {"value": 42}}
    result = await runner.run(job, tmp_path)
    assert result.ok
    assert result.output["echo"] == job["payload"]
@pytest.mark.asyncio
async def test_cli_runner_success(tmp_path: Path):
    """A successful command must produce an ok result with a stdout artifact."""
    runner = CLIRunner()
    job = {"runner": {"command": ["echo", "hello"]}}
    result = await runner.run(job, tmp_path)
    assert result.ok
    assert result.artifacts is not None
    # stdout.log is written into the job workspace and exposed as an artifact.
    stdout_path = result.artifacts["stdout"]
    assert stdout_path.exists()
    assert stdout_path.read_text().strip() == "hello"
@pytest.mark.asyncio
async def test_cli_runner_invalid_command(tmp_path: Path):
    """A job without runner.command must fail with INVALID_COMMAND."""
    runner = CLIRunner()
    job = {"runner": {}}
    result = await runner.run(job, tmp_path)
    assert not result.ok
    assert result.output["error_code"] == "INVALID_COMMAND"

View File

@@ -1,245 +0,0 @@
#!/usr/bin/env python3
"""
AITBC Wallet CLI - A command-line wallet for AITBC blockchain
"""
import argparse
import json
import sys
import os
from pathlib import Path
import httpx
from datetime import datetime
# Configuration
BLOCKCHAIN_RPC = "http://127.0.0.1:9080"  # local node serving /metrics and /rpc/*
WALLET_DIR = Path.home() / ".aitbc" / "wallets"  # where demo wallet JSON files live
def print_header():
    """Print the banner shown above every CLI command's output."""
    banner = "=" * 50
    print(banner)
    print(" AITBC Blockchain Wallet CLI")
    print(banner)
def check_blockchain_connection():
    """Probe the node and return (connected, info, block_hash_or_state).

    The third element is a truncated block hash on success, the string
    "node_running" when /metrics answers but RPC fails, and None when
    the node is unreachable entirely.
    """
    # Step 1: is the node process alive at all? /metrics is the cheapest probe.
    try:
        metrics_resp = httpx.get(f"{BLOCKCHAIN_RPC}/metrics", timeout=5.0)
    except Exception as e:
        return False, str(e), None
    if metrics_resp.status_code != 200:
        return False, f"Node not responding (HTTP {metrics_resp.status_code})", None
    # Step 2: the node is up — check whether the RPC surface works too.
    try:
        head_resp = httpx.get(f"{BLOCKCHAIN_RPC}/rpc/head", timeout=5.0)
        if head_resp.status_code == 200:
            head = head_resp.json()
            return True, head.get("height", "unknown"), head.get("hash", "unknown")[:16] + "..."
        return False, f"RPC endpoint error (HTTP {head_resp.status_code})", "node_running"
    except Exception as e:
        return False, f"RPC error: {str(e)}", "node_running"
def get_balance(address):
    """Query the node for an address balance.

    Returns the decoded JSON body on success, or {"error": ...} on any
    HTTP or transport failure.
    """
    url = f"{BLOCKCHAIN_RPC}/rpc/getBalance/{address}"
    try:
        resp = httpx.get(url, timeout=5.0)
        if resp.status_code == 200:
            return resp.json()
        return {"error": f"HTTP {resp.status_code}"}
    except Exception as e:
        return {"error": str(e)}
def list_wallets():
    """Return metadata for every readable wallet JSON file in WALLET_DIR.

    Best-effort: corrupt, unreadable, or non-object JSON files are
    skipped. Results are sorted by filename for deterministic output
    (glob order is filesystem-dependent).
    """
    WALLET_DIR.mkdir(parents=True, exist_ok=True)
    wallets = []
    for wallet_file in sorted(WALLET_DIR.glob("*.json")):
        try:
            with open(wallet_file, 'r') as f:
                data = json.load(f)
            # A valid wallet file decodes to an object; anything else
            # (list, string, ...) would crash the .get() calls below.
            if not isinstance(data, dict):
                continue
        except Exception:
            # One broken wallet file must not break the whole listing.
            continue
        wallets.append({
            "id": wallet_file.stem,
            "address": data.get("address", "unknown"),
            "public_key": data.get("public_key", "unknown"),
            "created": data.get("created_at", "unknown")
        })
    return wallets
def create_wallet(wallet_id, address=None):
    """Create a demo wallet JSON file; returns (success, message).

    Fix: created_at is now an actual UTC timestamp — the original
    stamped local time with a "Z" (UTC) suffix.
    """
    from datetime import timezone  # local import: module only imports `datetime`

    WALLET_DIR.mkdir(parents=True, exist_ok=True)
    wallet_file = WALLET_DIR / f"{wallet_id}.json"
    if wallet_file.exists():
        return False, "Wallet already exists"
    # Generate a mock address if not provided
    if not address:
        address = f"aitbc1{wallet_id}{'x' * (40 - len(wallet_id))}"
    # Demo-only placeholder key; real keys come from the wallet daemon.
    public_key = f"0x{'1234567890abcdef' * 4}"
    created_at = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
    wallet_data = {
        "wallet_id": wallet_id,
        "address": address,
        "public_key": public_key,
        "created_at": created_at,
        "note": "This is a demo wallet file - not for production use"
    }
    try:
        with open(wallet_file, 'w') as f:
            json.dump(wallet_data, f, indent=2)
        return True, f"Wallet created: {wallet_file}"
    except Exception as e:
        return False, str(e)
def get_block_info(height=None):
    """Fetch a specific block (by height) or the chain head as JSON.

    Returns the decoded body, or {"error": ...} on any failure.
    """
    endpoint = f"{BLOCKCHAIN_RPC}/rpc/blocks/{height}" if height else f"{BLOCKCHAIN_RPC}/rpc/head"
    try:
        resp = httpx.get(endpoint, timeout=5.0)
        if resp.status_code == 200:
            return resp.json()
        return {"error": f"HTTP {resp.status_code}"}
    except Exception as e:
        return {"error": str(e)}
def main():
    """CLI entry point: parse the subcommand and dispatch to the handlers above."""
    parser = argparse.ArgumentParser(
        description="AITBC Blockchain Wallet CLI",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  %(prog)s status Check blockchain connection
  %(prog)s list List all local wallets
  %(prog)s balance <address> Get balance of an address
  %(prog)s block Show latest block info
  %(prog)s block <height> Show specific block info
    """
    )
    subparsers = parser.add_subparsers(dest="command", help="Available commands")
    # Status command
    status_parser = subparsers.add_parser("status", help="Check blockchain connection status")
    # List command
    list_parser = subparsers.add_parser("list", help="List all local wallets")
    # Balance command
    balance_parser = subparsers.add_parser("balance", help="Get balance for an address")
    balance_parser.add_argument("address", help="Wallet address to check")
    # Block command
    block_parser = subparsers.add_parser("block", help="Get block information")
    block_parser.add_argument("height", nargs="?", type=int, help="Block height (optional)")
    # Create command
    create_parser = subparsers.add_parser("create", help="Create a new wallet file")
    create_parser.add_argument("wallet_id", help="Wallet identifier")
    create_parser.add_argument("--address", help="Wallet address")
    args = parser.parse_args()
    # No subcommand: show the banner plus usage and exit cleanly.
    if not args.command:
        print_header()
        parser.print_help()
        return
    if args.command == "status":
        print_header()
        print("Checking blockchain connection...\n")
        # Three-way outcome: connected / node up but RPC broken / down.
        connected, info, block_hash = check_blockchain_connection()
        if connected:
            print(f"✅ Status: CONNECTED")
            print(f"📦 Node: {BLOCKCHAIN_RPC}")
            print(f"🔗 Latest Block: #{info}")
            print(f"🧮 Block Hash: {block_hash}")
            print(f"⏰ Checked at: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
        elif block_hash == "node_running":
            # Sentinel from check_blockchain_connection: /metrics OK, RPC failing.
            print(f"⚠️ Status: NODE RUNNING - RPC UNAVAILABLE")
            print(f"📦 Node: {BLOCKCHAIN_RPC}")
            print(f"❌ RPC Error: {info}")
            print(f"💡 The blockchain node is running but RPC endpoints are not working")
            print(f" This might be due to initialization or database issues")
            print(f"⏰ Checked at: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
        else:
            print(f"❌ Status: DISCONNECTED")
            print(f"📦 Node: {BLOCKCHAIN_RPC}")
            print(f"⚠️ Error: {info}")
            print(f"💡 Make sure the blockchain node is running on port 9080")
    elif args.command == "list":
        print_header()
        wallets = list_wallets()
        if wallets:
            print(f"Found {len(wallets)} wallet(s) in {WALLET_DIR}:\n")
            for w in wallets:
                print(f"🔐 Wallet ID: {w['id']}")
                print(f" Address: {w['address']}")
                print(f" Public Key: {w['public_key'][:20]}...")
                print(f" Created: {w['created']}")
                print()
        else:
            print(f"No wallets found in {WALLET_DIR}")
            print("\n💡 To create a wallet, use the wallet-daemon service")
    elif args.command == "balance":
        print_header()
        print(f"Checking balance for address: {args.address}\n")
        result = get_balance(args.address)
        if "error" in result:
            print(f"❌ Error: {result['error']}")
        else:
            balance = result.get("balance", 0)
            print(f"💰 Balance: {balance} AITBC")
            print(f"📍 Address: {args.address}")
    elif args.command == "block":
        print_header()
        if args.height:
            print(f"Getting block #{args.height}...\n")
        else:
            print("Getting latest block...\n")
        result = get_block_info(args.height)
        if "error" in result:
            print(f"❌ Error: {result['error']}")
        else:
            print(f"📦 Block Height: {result.get('height', 'unknown')}")
            print(f"🧮 Block Hash: {result.get('hash', 'unknown')}")
            print(f"⏰ Timestamp: {result.get('timestamp', 'unknown')}")
            print(f"👤 Proposer: {result.get('proposer', 'unknown')}")
            print(f"📊 Transactions: {len(result.get('transactions', []))}")
    elif args.command == "create":
        print_header()
        success, message = create_wallet(args.wallet_id, args.address)
        if success:
            print(f"✅ {message}")
            print(f"\nWallet Details:")
            print(f" ID: {args.wallet_id}")
            print(f" Address: {args.address or f'aitbc1{args.wallet_id}...'}")
            print(f"\n💡 This is a demo wallet file for testing purposes")
            print(f" Use 'aitbc-wallet list' to see all wallets")
        else:
            print(f"❌ Error: {message}")
    else:
        parser.print_help()
if __name__ == "__main__":
    main()

View File

@@ -1,102 +0,0 @@
.TH AITBC-WALLET "1" "December 2025" "AITBC Wallet CLI" "User Commands"
.SH NAME
aitbc-wallet \- AITBC Blockchain Wallet Command Line Interface
.SH SYNOPSIS
.B aitbc-wallet
[\fIOPTIONS\fR] \fICOMMAND\fR [\fIARGS\fR]
.SH DESCRIPTION
The AITBC Wallet CLI is a command-line tool for interacting with the AITBC blockchain. It allows you to manage wallets, check balances, and monitor blockchain status without exposing your wallet to web interfaces.
.SH COMMANDS
.TP
\fBstatus\fR
Check if the wallet is connected to the AITBC blockchain node.
.TP
\fBlist\fR
List all local wallets stored in ~/.aitbc/wallets/.
.TP
\fBbalance\fR \fIADDRESS\fR
Get the AITBC token balance for the specified address.
.TP
\fBblock\fR [\fIHEIGHT\fR]
Show information about the latest block or a specific block height.
.SH EXAMPLES
Check blockchain connection status:
.P
.RS 4
.nf
$ aitbc-wallet status
==================================================
AITBC Blockchain Wallet CLI
==================================================
Checking blockchain connection...
✅ Status: CONNECTED
📦 Node: http://127.0.0.1:9080
🔗 Latest Block: #42
🧮 Block Hash: 0x1234...abcd
⏰ Checked at: 2025-12-28 10:30:00
.fi
.RE
.P
List all wallets:
.P
.RS 4
.nf
$ aitbc-wallet list
==================================================
AITBC Blockchain Wallet CLI
==================================================
Found 1 wallet(s) in /home/user/.aitbc/wallets:
🔐 Wallet ID: demo-wallet
Address: aitbc1x7f8x9k2m3n4p5q6r7s8t9u0v1w2x3y4z5a6b7c
Public Key: 0x3aaa0a91f69d886a90...
Created: 2025-12-28T10:30:00Z
.fi
.RE
.P
Check wallet balance:
.P
.RS 4
.nf
$ aitbc-wallet balance aitbc1x7f8x9k2m3n4p5q6r7s8t9u0v1w2x3y4z5a6b7c
==================================================
AITBC Blockchain Wallet CLI
==================================================
Checking balance for address: aitbc1x7f8x9k2m3n4p5q6r7s8t9u0v1w2x3y4z5a6b7c
💰 Balance: 1000 AITBC
📍 Address: aitbc1x7f8x9k2m3n4p5q6r7s8t9u0v1w2x3y4z5a6b7c
.fi
.RE
.SH FILES
.TP
.I ~/.aitbc/wallets/
Directory where local wallet files are stored.
.TP
.I /usr/local/bin/aitbc-wallet
The wallet CLI executable.
.SH ENVIRONMENT
.TP
.I BLOCKCHAIN_RPC
The blockchain node RPC URL (default: http://127.0.0.1:9080).
.SH SECURITY
.P
The wallet CLI is designed with security in mind:
.RS 4
.IP \(bu 4
No web interface - purely command-line based
.IP \(bu 4
Wallets stored locally in encrypted format
.IP \(bu 4
Only connects to localhost blockchain node by default
.IP \(bu 4
No exposure of private keys to network services
.RE
.SH BUGS
Report bugs to the AITBC project issue tracker.
.SH SEE ALSO
.BR aitbc-blockchain (8),
.BR aitbc-coordinator (8)
.SH AUTHOR
AITBC Development Team

View File

@@ -1,256 +0,0 @@
#!/usr/bin/env python3
"""
AITBC Wallet CLI - Command Line Interface for AITBC Blockchain Wallet
"""
import argparse
import sys
import json
import os
from pathlib import Path
from typing import Optional
import httpx
# Add parent directory to path for imports
sys.path.insert(0, str(Path(__file__).parent.parent.parent / "wallet-daemon" / "src"))
from app.keystore.service import KeystoreService
from app.ledger_mock import SQLiteLedgerAdapter
from app.settings import Settings
class AITBCWallet:
    """AITBC Blockchain Wallet CLI.

    Thin client that combines a local keystore (``KeystoreService``) with
    the blockchain node's HTTP API. One file per wallet is kept under
    ``wallet_dir`` with a ``.wallet`` suffix.
    """

    def __init__(self, wallet_dir: str = None):
        """Initialize the wallet client.

        Args:
            wallet_dir: Directory for wallet files; defaults to
                ``~/.aitbc/wallets``. Created eagerly so later file
                operations can assume it exists.
        """
        self.wallet_dir = Path(wallet_dir or os.path.expanduser("~/.aitbc/wallets"))
        self.wallet_dir.mkdir(parents=True, exist_ok=True)
        self.keystore = KeystoreService()
        self.blockchain_rpc = "http://127.0.0.1:9080"  # Default blockchain node RPC

    def _get_wallet_path(self, wallet_id: str) -> Path:
        """Get the path to a wallet file (``<wallet_dir>/<wallet_id>.wallet``)."""
        return self.wallet_dir / f"{wallet_id}.wallet"

    def create_wallet(self, wallet_id: str, password: str) -> dict:
        """Create a new wallet.

        Generates a keypair via the keystore and writes it, encrypted with
        *password*, to the wallet file.

        Returns:
            A summary dict (``wallet_id``, ``public_key``, ``status``) on
            success, or ``{"error": ...}`` if a wallet with this id exists.
        """
        wallet_path = self._get_wallet_path(wallet_id)
        if wallet_path.exists():
            return {"error": "Wallet already exists"}
        # Generate keypair
        keypair = self.keystore.generate_keypair()
        # Store encrypted wallet
        wallet_data = {
            "wallet_id": wallet_id,
            "public_key": keypair["public_key"],
            "encrypted_private_key": keypair["encrypted_private_key"],
            "salt": keypair["salt"]
        }
        # Encrypt and save
        self.keystore.save_wallet(wallet_path, wallet_data, password)
        return {
            "wallet_id": wallet_id,
            "public_key": keypair["public_key"],
            "status": "created"
        }

    def list_wallets(self) -> list:
        """List all wallet addresses.

        Scans ``wallet_dir`` for ``*.wallet`` files; unreadable files are
        silently skipped. The returned "address" is a display placeholder
        derived from the filename, not a real chain address.
        """
        wallets = []
        for wallet_file in self.wallet_dir.glob("*.wallet"):
            try:
                wallet_id = wallet_file.stem
                # Try to read public key without decrypting
                with open(wallet_file, 'rb') as f:
                    # This is simplified - in real implementation, we'd read metadata
                    wallets.append({
                        "wallet_id": wallet_id,
                        "address": f"0x{wallet_id[:8]}...",  # Simplified address format
                        "path": str(wallet_file)
                    })
            except Exception:
                continue
        return wallets

    def get_balance(self, wallet_id: str, password: str) -> dict:
        """Get wallet balance from blockchain.

        Unlocks the wallet with *password* to recover its public key, then
        queries ``GET /v1/balances/<public_key>`` on the node.

        Returns:
            The node's JSON payload on HTTP 200, otherwise an
            ``{"error": ...}`` dict (missing wallet, bad password, or
            unreachable node).
        """
        # First unlock wallet to get public key
        wallet_path = self._get_wallet_path(wallet_id)
        if not wallet_path.exists():
            return {"error": "Wallet not found"}
        try:
            wallet_data = self.keystore.load_wallet(wallet_path, password)
            public_key = wallet_data["public_key"]
            # Query blockchain for balance
            try:
                with httpx.Client() as client:
                    response = client.get(
                        f"{self.blockchain_rpc}/v1/balances/{public_key}",
                        timeout=5.0
                    )
                    if response.status_code == 200:
                        return response.json()
                    else:
                        return {"error": "Failed to query blockchain", "status": response.status_code}
            except Exception as e:
                return {"error": f"Cannot connect to blockchain: {str(e)}"}
        except Exception as e:
            return {"error": f"Failed to unlock wallet: {str(e)}"}

    def check_connection(self) -> dict:
        """Check if connected to blockchain.

        Probes ``GET /v1/blocks/head``; always returns a dict with a
        ``connected`` bool plus either block info or an ``error`` string.
        """
        try:
            with httpx.Client() as client:
                # Try to get the latest block
                response = client.get(f"{self.blockchain_rpc}/v1/blocks/head", timeout=5.0)
                if response.status_code == 200:
                    block = response.json()
                    return {
                        "connected": True,
                        "blockchain_url": self.blockchain_rpc,
                        "latest_block": block.get("height", "unknown"),
                        "status": "connected"
                    }
                else:
                    return {
                        "connected": False,
                        "error": f"HTTP {response.status_code}",
                        "status": "disconnected"
                    }
        except Exception as e:
            return {
                "connected": False,
                "error": str(e),
                "status": "disconnected"
            }

    def send_transaction(self, wallet_id: str, password: str, to_address: str, amount: float) -> dict:
        """Send transaction.

        Unlocks the wallet, signs a transfer of *amount* to *to_address*,
        and POSTs it to the node's ``/v1/transactions`` endpoint.

        Returns:
            The node's JSON response on HTTP 200, else ``{"error": ...}``.
        """
        wallet_path = self._get_wallet_path(wallet_id)
        if not wallet_path.exists():
            return {"error": "Wallet not found"}
        try:
            # Unlock wallet
            wallet_data = self.keystore.load_wallet(wallet_path, password)
            # NOTE(review): create_wallet() stores "encrypted_private_key";
            # this assumes load_wallet() returns a decrypted "private_key"
            # entry — confirm against KeystoreService.
            private_key = wallet_data["private_key"]
            # Create transaction
            transaction = {
                "from": wallet_data["public_key"],
                "to": to_address,
                "amount": amount,
                "nonce": 0  # Would get from blockchain
            }
            # Sign transaction
            signature = self.keystore.sign_transaction(private_key, transaction)
            transaction["signature"] = signature
            # Send to blockchain
            with httpx.Client() as client:
                response = client.post(
                    f"{self.blockchain_rpc}/v1/transactions",
                    json=transaction,
                    timeout=5.0
                )
                if response.status_code == 200:
                    return response.json()
                else:
                    return {"error": f"Failed to send transaction: {response.text}"}
        except Exception as e:
            return {"error": str(e)}
def main():
    """Command-line entry point: build the parser, then dispatch the chosen
    subcommand to the matching :class:`AITBCWallet` operation."""
    parser = argparse.ArgumentParser(description="AITBC Blockchain Wallet CLI")
    parser.add_argument("--wallet-dir", default=None, help="Wallet directory path")
    sub = parser.add_subparsers(dest="command", help="Available commands")

    # create <wallet_id> <password>
    p_create = sub.add_parser("create", help="Create a new wallet")
    p_create.add_argument("wallet_id", help="Wallet identifier")
    p_create.add_argument("password", help="Wallet password")
    # list
    sub.add_parser("list", help="List all wallets")
    # balance <wallet_id> <password>
    p_balance = sub.add_parser("balance", help="Get wallet balance")
    p_balance.add_argument("wallet_id", help="Wallet identifier")
    p_balance.add_argument("password", help="Wallet password")
    # status
    sub.add_parser("status", help="Check blockchain connection status")
    # send <wallet_id> <password> <to_address> <amount>
    p_send = sub.add_parser("send", help="Send transaction")
    p_send.add_argument("wallet_id", help="Wallet identifier")
    p_send.add_argument("password", help="Wallet password")
    p_send.add_argument("to_address", help="Recipient address")
    p_send.add_argument("amount", type=float, help="Amount to send")

    args = parser.parse_args()
    if not args.command:
        parser.print_help()
        return

    wallet = AITBCWallet(args.wallet_dir)
    command = args.command

    if command == "create":
        outcome = wallet.create_wallet(args.wallet_id, args.password)
        if "error" in outcome:
            print(f"Error: {outcome['error']}", file=sys.stderr)
        else:
            print("Wallet created successfully!")
            print(f"Wallet ID: {outcome['wallet_id']}")
            print(f"Public Key: {outcome['public_key']}")
    elif command == "list":
        entries = wallet.list_wallets()
        if not entries:
            print("No wallets found")
        else:
            print("Available wallets:")
            for entry in entries:
                print(f"  - {entry['wallet_id']}: {entry['address']}")
    elif command == "balance":
        outcome = wallet.get_balance(args.wallet_id, args.password)
        if "error" in outcome:
            print(f"Error: {outcome['error']}", file=sys.stderr)
        else:
            print(f"Balance: {outcome.get('balance', 'unknown')}")
    elif command == "status":
        outcome = wallet.check_connection()
        if outcome["connected"]:
            print(f"✓ Connected to blockchain at {outcome['blockchain_url']}")
            print(f"  Latest block: {outcome['latest_block']}")
        else:
            print(f"✗ Not connected: {outcome['error']}")
    elif command == "send":
        outcome = wallet.send_transaction(args.wallet_id, args.password, args.to_address, args.amount)
        if "error" in outcome:
            print(f"Error: {outcome['error']}", file=sys.stderr)
        else:
            print(f"Transaction sent: {outcome.get('tx_hash', 'unknown')}")


if __name__ == "__main__":
    main()

View File

@@ -1,101 +0,0 @@
#!/usr/bin/env python3
"""
Simple AITBC Wallet CLI
"""
import argparse
import json
import sys
import os
from pathlib import Path
import httpx
import getpass
def check_blockchain_connection():
    """Probe the local node's ``/rpc/head`` endpoint.

    Returns:
        ``(True, height)`` on HTTP 200, where ``height`` comes from the
        JSON payload (``"unknown"`` if absent); ``(False, reason)`` for
        any other status code or network/parse failure.
    """
    try:
        resp = httpx.get("http://127.0.0.1:9080/rpc/head", timeout=5.0)
        if resp.status_code != 200:
            return False, f"HTTP {resp.status_code}"
        return True, resp.json().get("height", "unknown")
    except Exception as exc:
        return False, str(exc)
def get_balance(address):
    """Query the node for the balance of *address*.

    Returns:
        The node's JSON payload on HTTP 200, otherwise ``{"error": ...}``.
    """
    try:
        resp = httpx.get(f"http://127.0.0.1:9080/rpc/getBalance/{address}", timeout=5.0)
        if resp.status_code == 200:
            return resp.json()
        return {"error": f"HTTP {resp.status_code}"}
    except Exception as exc:
        return {"error": str(exc)}
def list_wallets():
    """List local wallets.

    Scans ``~/.aitbc/wallets`` for ``*.json`` files (creating the directory
    if missing) and returns one summary dict per readable wallet file with
    keys ``id``, ``address`` and ``public_key`` (truncated for display).
    Unreadable or malformed files are skipped.
    """
    wallet_dir = Path.home() / ".aitbc" / "wallets"
    wallet_dir.mkdir(parents=True, exist_ok=True)
    wallets = []
    for wallet_file in wallet_dir.glob("*.json"):
        try:
            with open(wallet_file, 'r') as f:
                data = json.load(f)
            wallets.append({
                "id": wallet_file.stem,
                "address": data.get("address", "unknown"),
                "public_key": data.get("public_key", "unknown")[:20] + "..."
            })
        # Skip unreadable/corrupt wallet files instead of aborting the whole
        # listing. A bare `except:` here previously also swallowed
        # SystemExit and KeyboardInterrupt; catch only what the body can
        # raise: I/O errors, invalid JSON (JSONDecodeError is a ValueError),
        # non-dict payloads (AttributeError) and non-str values (TypeError).
        except (OSError, ValueError, AttributeError, TypeError):
            continue
    return wallets
def _cmd_status():
    """Handle the `status` subcommand: report node reachability."""
    ok, detail = check_blockchain_connection()
    if not ok:
        print(f"✗ Not connected: {detail}")
        return
    print("✓ Connected to AITBC Blockchain")
    print(f"  Latest block: {detail}")
    print("  Node: http://127.0.0.1:9080")


def _cmd_list():
    """Handle the `list` subcommand: print local wallet summaries."""
    entries = list_wallets()
    if not entries:
        print("No wallets found")
        print(f"Wallet directory: {Path.home() / '.aitbc' / 'wallets'}")
        return
    print("Local wallets:")
    for entry in entries:
        print(f"  {entry['id']}: {entry['address']}")


def _cmd_balance(address):
    """Handle the `balance` subcommand for *address*."""
    reply = get_balance(address)
    if "error" in reply:
        print(f"Error: {reply['error']}")
    else:
        print(f"Balance: {reply.get('balance', 0)} AITBC")


def main():
    """Parse CLI arguments and dispatch to the matching subcommand."""
    parser = argparse.ArgumentParser(description="AITBC Wallet CLI")
    sub = parser.add_subparsers(dest="command", help="Commands")
    sub.add_parser("status", help="Check blockchain connection")
    sub.add_parser("list", help="List wallets")
    balance = sub.add_parser("balance", help="Get balance")
    balance.add_argument("address", help="Wallet address")

    args = parser.parse_args()
    if args.command == "status":
        _cmd_status()
    elif args.command == "list":
        _cmd_list()
    elif args.command == "balance":
        _cmd_balance(args.address)
    else:
        parser.print_help()


if __name__ == "__main__":
    main()

View File

@@ -1,25 +0,0 @@
[Unit]
Description=AITBC Miner Node
After=network-online.target
Wants=network-online.target
[Service]
Type=simple
User=aitbc
Group=aitbc
WorkingDirectory=/opt/aitbc/apps/miner-node
EnvironmentFile=/opt/aitbc/apps/miner-node/.env
ExecStart=/opt/aitbc/apps/miner-node/.venv/bin/python -m aitbc_miner.main
Restart=always
RestartSec=3
Nice=5
IOSchedulingClass=best-effort
IOSchedulingPriority=6
NoNewPrivileges=true
PrivateTmp=true
ProtectSystem=full
ProtectHome=true
ReadWritePaths=/opt/aitbc/apps/miner-node /var/log/aitbc
[Install]
WantedBy=multi-user.target

View File

@@ -1,115 +0,0 @@
name: Deploy Documentation
on:
push:
branches: [ main, develop ]
paths: [ 'docs/**' ]
pull_request:
branches: [ main ]
paths: [ 'docs/**' ]
workflow_dispatch:
permissions:
contents: read
pages: write
id-token: write
concurrency:
group: "pages"
cancel-in-progress: false
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Python
uses: actions/setup-python@v4
with:
python-version: '3.11'
- name: Install dependencies
run: |
pip install -r docs/requirements.txt
- name: Generate OpenAPI specs
run: |
cd docs
python scripts/generate_openapi.py
- name: Build documentation
run: |
cd docs
mkdocs build --strict
- name: Upload artifact
uses: actions/upload-pages-artifact@v2
with:
path: docs/site
deploy:
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
runs-on: ubuntu-latest
needs: build
if: github.ref == 'refs/heads/main'
steps:
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v2
# Deploy to staging for develop branch
deploy-staging:
runs-on: ubuntu-latest
needs: build
if: github.ref == 'refs/heads/develop'
steps:
- name: Deploy to Staging
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./docs/site
destination_dir: staging
user_name: github-actions[bot]
user_email: github-actions[bot]@users.noreply.github.com
# Deploy to production S3
deploy-production:
runs-on: ubuntu-latest
needs: build
if: github.ref == 'refs/heads/main'
environment: production
steps:
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-east-1
- name: Deploy to S3
run: |
aws s3 sync docs/site/ s3://docs.aitbc.io/ --delete
aws cloudfront create-invalidation --distribution-id ${{ secrets.CLOUDFRONT_DISTRIBUTION_ID }} --paths "/*"
# Notify on deployment
notify:
runs-on: ubuntu-latest
needs: [deploy, deploy-production]
if: always()
steps:
- name: Notify Discord
uses: rjstone/discord-webhook-notify@v1
with:
severity: info
text: "Documentation deployment completed"
description: |
Build: ${{ needs.build.result }}
Deploy: ${{ needs.deploy.result }}
Production: ${{ needs.deploy-production.result }}
webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}

View File

@@ -1,87 +0,0 @@
# .pages configuration for awesome-pages plugin
home: index.md
format: standard
ordering:
asc: title
sections:
- title: Getting Started
icon: material/rocket-launch
children:
- getting-started/introduction.md
- getting-started/quickstart.md
- getting-started/installation.md
- getting-started/architecture.md
- title: User Guide
icon: material/account-group
children:
- user-guide/overview.md
- user-guide/creating-jobs.md
- user-guide/marketplace.md
- user-guide/explorer.md
- user-guide/wallet-management.md
- title: Developer Guide
icon: material/code-tags
children:
- developer-guide/overview.md
- developer-guide/setup.md
- developer-guide/api-authentication.md
- title: SDKs
icon: material/package-variant
children:
- developer-guide/sdks/python.md
- developer-guide/sdks/javascript.md
- developer-guide/examples.md
- developer-guide/contributing.md
- title: API Reference
icon: material/api
children:
- title: Coordinator API
icon: material/server
children:
- api/coordinator/overview.md
- api/coordinator/authentication.md
- api/coordinator/endpoints.md
- api/coordinator/openapi.md
- title: Blockchain Node API
icon: material/link-variant
children:
- api/blockchain/overview.md
- api/blockchain/websocket.md
- api/blockchain/jsonrpc.md
- api/blockchain/openapi.md
- title: Wallet Daemon API
icon: material/wallet
children:
- api/wallet/overview.md
- api/wallet/endpoints.md
- api/wallet/openapi.md
- title: Operations
icon: material/cog
children:
- operations/deployment.md
- operations/monitoring.md
- operations/security.md
- operations/backup-restore.md
- operations/troubleshooting.md
- title: Tutorials
icon: material/school
children:
- tutorials/building-dapp.md
- tutorials/mining-setup.md
- tutorials/running-node.md
- tutorials/integration-examples.md
- title: Resources
icon: material/information
children:
- resources/glossary.md
- resources/faq.md
- resources/support.md
- resources/changelog.md

View File

@@ -219,7 +219,7 @@ This document tracks components that have been successfully deployed and are ope
- Documented common issues, troubleshooting, and performance metrics
-**Documentation Updates**
- Updated `docs/localhost-testing-scenario.md` with CLI wrapper usage
- Updated `docs/developer/testing/localhost-testing-scenario.md` with CLI wrapper usage
- Converted all examples to use localhost URLs (127.0.0.1) instead of production
- Added host user paths and quick start commands
- Documented complete testing workflow from setup to verification
@@ -268,3 +268,31 @@ This document tracks components that have been successfully deployed and are ope
-**Comprehensive .gitignore**
- Expanded from 39 to 145 lines with organized sections
- Added project-specific rules for coordinator, explorer, GPU miner
### Repository File Audit & Cleanup
-**File Audit Document** (`docs/files.md`)
- Created comprehensive audit of all 849 repository files
- Categorized into Whitelist (60), Greylist (0), Placeholders (12), Removed (35)
- All greylist items resolved - no pending reviews
-**Abandoned Folders Removed** (35 items total)
- `ecosystem*/` (4 folders), `enterprise-connectors/`, `research/`
- `apps/client-web/`, `apps/marketplace-ui/`, `apps/wallet-cli/`
- `apps/miner-node/`, `apps/miner-dashboard/`
- `packages/py/aitbc-core/`, `aitbc-p2p/`, `aitbc-scheduler/`
- `packages/js/ui-widgets/`
- `python-sdk/`, `windsurf/`, `configs/`, `docs/user-guide/`, `docs/bootstrap/`
- `api/`, `governance/`, `protocols/`
- 5 GPU miner variants, 3 extension variants
-**Docs Folder Reorganization**
- Root now contains only: `done.md`, `files.md`, `roadmap.md`
- Created new subfolders: `_config/`, `reference/components/`, `reference/governance/`
- Created: `operator/deployment/`, `operator/migration/`
- Created: `developer/testing/`, `developer/integration/`
- Moved 25 files to appropriate subfolders
- Moved receipt spec: `protocols/receipts/spec.md` → `docs/reference/specs/receipt-spec.md`
-**Roadmap Updates**
- Added Stage 19: Placeholder Content Development
- Added Stage 20: Technical Debt Remediation (blockchain-node, solidity-token, ZKReceiptVerifier)

317
docs/files.md Normal file
View File

@@ -0,0 +1,317 @@
# AITBC Repository File Audit
This document categorizes all files and folders in the repository by their status:
- **Whitelist (✅)**: Active, up-to-date, essential
- **Greylist (⚠️)**: Uncertain status, may need review
- **Blacklist (❌)**: Legacy, unused, outdated, candidates for removal
Last updated: 2026-01-24
---
## Whitelist ✅ (Active & Essential)
### Core Applications (`apps/`)
| Path | Status | Notes |
|------|--------|-------|
| `apps/coordinator-api/` | ✅ Active | Main API service, recently updated (Jan 2026) |
| `apps/explorer-web/` | ✅ Active | Blockchain explorer, recently updated |
| `apps/wallet-daemon/` | ✅ Active | Wallet service, deployed in production |
| `apps/trade-exchange/` | ✅ Active | Bitcoin exchange, deployed |
| `apps/zk-circuits/` | ✅ Active | ZK proof circuits, deployed |
| `apps/marketplace-web/` | ✅ Active | Marketplace frontend, deployed |
### Scripts (`scripts/`)
| Path | Status | Notes |
|------|--------|-------|
| `scripts/aitbc-cli.sh` | ✅ Active | Main CLI tool, heavily used |
| `scripts/gpu/gpu_miner_host.py` | ✅ Active | Production GPU miner |
| `scripts/gpu/gpu_miner_host_wrapper.sh` | ✅ Active | Systemd wrapper |
| `scripts/deploy/` | ✅ Active | Deployment scripts |
| `scripts/service/` | ✅ Active | Service management |
| `scripts/dev_services.sh` | ✅ Active | Local development |
### Infrastructure (`infra/`, `systemd/`)
| Path | Status | Notes |
|------|--------|-------|
| `infra/nginx/` | ✅ Active | Production nginx configs |
| `systemd/aitbc-host-gpu-miner.service` | ✅ Active | Production service |
| `systemd/coordinator-api.service` | ✅ Active | Production service |
### Website (`website/`)
| Path | Status | Notes |
|------|--------|-------|
| `website/docs/` | ✅ Active | HTML documentation, recently refactored |
| `website/docs/css/docs.css` | ✅ Active | Shared CSS (1232 lines) |
| `website/docs/js/theme.js` | ✅ Active | Theme toggle |
| `website/index.html` | ✅ Active | Main website |
| `website/dashboards/` | ✅ Active | Admin/miner dashboards |
### Documentation (`docs/`)
| Path | Status | Notes |
|------|--------|-------|
| `docs/done.md` | ✅ Active | Completion tracking |
| `docs/roadmap.md` | ✅ Active | Development roadmap |
| `docs/developer/testing/localhost-testing-scenario.md` | ✅ Active | Testing guide |
| `docs/reference/components/miner_node.md` | ✅ Active | Miner documentation |
| `docs/reference/components/coordinator_api.md` | ✅ Active | API documentation |
| `docs/developer/integration/skills-framework.md` | ✅ Active | Skills documentation |
### Cascade Skills (`.windsurf/`)
| Path | Status | Notes |
|------|--------|-------|
| `.windsurf/skills/blockchain-operations/` | ✅ Active | Node management skill |
| `.windsurf/skills/deploy-production/` | ✅ Active | Deployment skill |
| `.windsurf/workflows/` | ✅ Active | Workflow definitions |
### CLI Tools (`cli/`)
| Path | Status | Notes |
|------|--------|-------|
| `cli/client.py` | ✅ Active | Client CLI |
| `cli/miner.py` | ✅ Active | Miner CLI |
| `cli/wallet.py` | ✅ Active | Wallet CLI |
| `cli/test_ollama_gpu_provider.py` | ✅ Active | GPU testing |
### Home Scripts (`home/`)
| Path | Status | Notes |
|------|--------|-------|
| `home/client/` | ✅ Active | Client test scripts |
| `home/miner/` | ✅ Active | Miner test scripts |
| `home/quick_job.py` | ✅ Active | Quick job submission |
| `home/simple_job_flow.py` | ✅ Active | Job flow testing |
### Plugins (`plugins/`)
| Path | Status | Notes |
|------|--------|-------|
| `plugins/ollama/` | ✅ Active | Ollama integration |
### Root Files
| Path | Status | Notes |
|------|--------|-------|
| `README.md` | ✅ Active | Project readme |
| `LICENSE` | ✅ Active | License file |
| `.gitignore` | ✅ Active | Recently updated (145 lines) |
| `pyproject.toml` | ✅ Active | Python project config |
| `.editorconfig` | ✅ Active | Editor config |
---
## Greylist ⚠️ (Needs Review)
### Applications - Uncertain Status
| Path | Status | Notes |
|------|--------|-------|
| `apps/blockchain-node/` | 📋 Planned | Has code, SQLModel issues - see roadmap Stage 20 |
### Packages
| Path | Status | Notes |
|------|--------|-------|
| `packages/solidity/aitbc-token/` | 📋 Planned | Smart contracts, deployment planned - see roadmap Stage 20 |
### Scripts
| Path | Status | Notes |
|------|--------|-------|
| `scripts/test/` | ✅ Keep | 7 test scripts, all current (Jan 2026) |
### Documentation
| Path | Status | Notes |
|------|--------|-------|
| `docs/developer/` | ✅ Keep | 6 markdown files |
| `docs/operator/` | ✅ Keep | 5 markdown files |
| `docs/user/` | ✅ Keep | 1 markdown file |
| `docs/tutorials/` | ✅ Keep | 3 markdown files |
### Infrastructure
| Path | Status | Notes |
|------|--------|-------|
| `infra/k8s/` | ✅ Keep | 5 yaml files (backup, cert-manager, netpol, sealed-secrets) |
### Extensions
| Path | Status | Notes |
|------|--------|-------|
| `extensions/aitbc-wallet-firefox/` | ✅ Keep | Firefox extension source (7 files) |
| `extensions/aitbc-wallet-firefox-v1.0.5.xpi` | ✅ Keep | Built extension package |
### Other
| Path | Status | Notes |
|------|--------|-------|
| `contracts/ZKReceiptVerifier.sol` | 📋 Planned | ZK verifier contract - see roadmap Stage 20 |
| `docs/reference/specs/receipt-spec.md` | ✅ Keep | Canonical receipt schema (moved from protocols/) |
---
## Future Placeholders 📋 (Keep - Will Be Populated)
These empty folders are intentional scaffolding for planned future work per the roadmap.
| Path | Status | Roadmap Stage |
|------|--------|---------------|
| `docs/user/guides/` | 📋 Placeholder | Stage 5 - Documentation |
| `docs/developer/tutorials/` | 📋 Placeholder | Stage 5 - Documentation |
| `docs/reference/specs/` | 📋 Placeholder | Stage 5 - Documentation |
| `infra/terraform/environments/staging/` | 📋 Placeholder | Stage 5 - Infrastructure |
| `infra/terraform/environments/prod/` | 📋 Placeholder | Stage 5 - Infrastructure |
| `infra/helm/values/dev/` | 📋 Placeholder | Stage 5 - Infrastructure |
| `infra/helm/values/staging/` | 📋 Placeholder | Stage 5 - Infrastructure |
| `infra/helm/values/prod/` | 📋 Placeholder | Stage 5 - Infrastructure |
| `apps/coordinator-api/migrations/` | 📋 Placeholder | Alembic migrations |
| `apps/pool-hub/src/app/routers/` | 📋 Placeholder | Stage 3 - Pool Hub |
| `apps/pool-hub/src/app/registry/` | 📋 Placeholder | Stage 3 - Pool Hub |
| `apps/pool-hub/src/app/scoring/` | 📋 Placeholder | Stage 3 - Pool Hub |
---
## Blacklist ❌ (Abandoned - Remove)
### Abandoned Empty Folders (Created but never used)
| Path | Status | Notes |
|------|--------|-------|
| `apps/client-web/src/` | ❌ Remove | Created Sep 2025, never implemented |
| `apps/client-web/public/` | ❌ Remove | Created Sep 2025, never implemented |
| `apps/marketplace-ui/` | ❌ Remove | Superseded by `marketplace-web` |
| `apps/wallet-cli/` | ❌ Remove | Superseded by `cli/wallet.py` |
| `packages/py/aitbc-core/src/` | ❌ Remove | Created Sep 2025, never implemented |
| `packages/py/aitbc-p2p/src/` | ❌ Remove | Created Sep 2025, never implemented |
| `packages/py/aitbc-scheduler/src/` | ❌ Remove | Created Sep 2025, never implemented |
| `packages/js/ui-widgets/src/` | ❌ Remove | Created Sep 2025, never implemented |
| `protocols/api/` | ❌ Remove | Never implemented |
| `protocols/payouts/` | ❌ Remove | Never implemented |
| `data/fixtures/` | ❌ Remove | Never populated |
| `data/samples/` | ❌ Remove | Never populated |
| `tools/mkdiagram/` | ❌ Remove | Never implemented |
| `examples/quickstart-client-python/` | ❌ Remove | Never implemented |
| `examples/quickstart-client-js/node/` | ❌ Remove | Never implemented |
| `examples/quickstart-client-js/browser/` | ❌ Remove | Never implemented |
| `examples/receipts-sign-verify/python/` | ❌ Remove | Never implemented |
| `examples/receipts-sign-verify/js/` | ❌ Remove | Never implemented |
| `scripts/env/` | ❌ Remove | Never populated |
| `windsurf/prompts/` | ❌ Remove | Superseded by `.windsurf/` |
| `windsurf/tasks/` | ❌ Remove | Superseded by `.windsurf/` |
### Duplicate/Redundant Folders
| Path | Status | Notes |
|------|--------|-------|
| `python-sdk/` | ❌ Duplicate | Duplicates `packages/py/aitbc-sdk/` |
| `windsurf/` | ❌ Duplicate | Superseded by `.windsurf/` |
| `configs/` | ❌ Duplicate | Empty subfolders, duplicates `infra/` and `systemd/` |
| `docs/user-guide/` | ❌ Duplicate | Duplicates `docs/user/` |
### Ecosystem Folders (Scaffolded but Unused)
| Path | Status | Notes |
|------|--------|-------|
| `ecosystem/` | ❌ Unused | Only has empty `academic/` subfolder |
| `ecosystem-analytics/` | ❌ Unused | Scaffolded Dec 2025, never used |
| `ecosystem-certification/` | ❌ Unused | Scaffolded Dec 2025, never used |
| `ecosystem-extensions/` | ❌ Unused | Only has template folder |
| `enterprise-connectors/` | ❌ Unused | Scaffolded Dec 2025, never used |
### Research Folders (Scaffolded but Unused)
| Path | Status | Notes |
|------|--------|-------|
| `research/autonomous-agents/` | ❌ Unused | Scaffolded, no active work |
| `research/consortium/` | ❌ Unused | Scaffolded, no active work |
| `research/prototypes/` | ❌ Unused | Scaffolded, no active work |
| `research/standards/` | ❌ Unused | Scaffolded, no active work |
### Generated/Build Artifacts (Should be in .gitignore)
| Path | Status | Notes |
|------|--------|-------|
| `packages/solidity/aitbc-token/typechain-types/` | ❌ Generated | Build artifact |
| `apps/explorer-web/dist/` | ❌ Generated | Build artifact |
| `logs/` | ❌ Generated | Runtime logs |
---
## Summary Statistics
| Category | Count | Action |
|----------|-------|--------|
| **Whitelist ✅** | ~60 items | Keep and maintain |
| **Greylist ⚠️** | 0 items | All resolved! |
| **Placeholders 📋** | 12 folders | Fill per roadmap Stage 19 |
| **Removed ❌** | 35 items | Cleaned up 2026-01-24 |
### Completed Actions (2026-01-24)
1. **Cleanup Done**:
- ✅ Removed 21 abandoned/duplicate folders
- ✅ Updated `.gitignore` with comprehensive rules
- ✅ Created this audit document
2. **Additional Cleanup (2026-01-24)**:
- ✅ Removed `apps/miner-node/` (superseded by `scripts/gpu/`)
- ✅ Removed `apps/miner-dashboard/` (superseded by `website/dashboards/`)
- ✅ Removed `docs/bootstrap/` (empty)
- ✅ Removed 5 GPU miner variants (kept only `gpu_miner_host.py`)
- ✅ Removed 3 extension variants (kept only `aitbc-wallet-firefox/`)
3. **Final Cleanup (2026-01-24)**:
- ✅ Removed `api/` folder (mock no longer needed - using live production)
- ✅ Removed `governance/` folder (too far in future)
- ✅ Removed `protocols/` folder (spec moved to docs/reference/specs/)
- ✅ Moved `protocols/receipts/spec.md` → `docs/reference/specs/receipt-spec.md`
- ✅ Added ZKReceiptVerifier and receipt spec to roadmap Stage 20
4. **Placeholder Plan** (see `roadmap.md` Stage 19):
- Q1 2026: Documentation folders (`docs/user/guides/`, `docs/developer/tutorials/`, `docs/reference/specs/`)
- Q2 2026: Infrastructure (`infra/terraform/`, `infra/helm/`)
- Q2 2026: Pool Hub components
---
## Folder Structure Recommendation
```
aitbc/
├── apps/ # Core applications
│ ├── coordinator-api/ # ✅ Keep
│ ├── explorer-web/ # ✅ Keep
│ ├── marketplace-web/ # ✅ Keep
│ ├── wallet-daemon/ # ✅ Keep
│ └── zk-circuits/ # ✅ Keep
├── cli/ # ✅ CLI tools
├── docs/ # ✅ Markdown documentation
├── infra/ # ✅ Infrastructure configs
├── packages/ # ✅ Keep (aitbc-crypto, aitbc-sdk, aitbc-token)
├── plugins/ # ✅ Keep (ollama)
├── scripts/ # ✅ Keep - organized
├── systemd/ # ✅ Keep
├── tests/ # ✅ Keep (e2e, integration, unit, security, load)
├── website/ # ✅ Keep
└── .windsurf/ # ✅ Keep
```
**Folders Removed (2026-01-24)**:
-`ecosystem*/` (all 4 folders) - removed
-`enterprise-connectors/` - removed
-`research/` - removed
-`python-sdk/` - removed (duplicate)
-`windsurf/` - removed (duplicate of `.windsurf/`)
-`configs/` - removed (duplicated `infra/`)
- ✅ Empty `apps/` subfolders - removed (client-web, marketplace-ui, wallet-cli)
- ✅ Empty `packages/` subfolders - removed (aitbc-core, aitbc-p2p, aitbc-scheduler, ui-widgets)
- ✅ Empty `examples/` subfolders - removed
-`tools/` - removed (empty)
-`docs/user-guide/` - removed (duplicate)

View File

@@ -1,204 +0,0 @@
site_name: AITBC Documentation
site_description: AI Trusted Blockchain Computing Platform Documentation
site_author: AITBC Team
site_url: https://docs.aitbc.io
# Repository
repo_name: aitbc/docs
repo_url: https://github.com/aitbc/docs
edit_uri: edit/main/docs/
# Copyright
copyright: Copyright &copy; 2024 AITBC Team
# Configuration
theme:
name: material
language: en
features:
- announce.dismiss
- content.action.edit
- content.action.view
- content.code.annotate
- content.code.copy
- content.tabs.link
- content.tooltips
- header.autohide
- navigation.expand
- navigation.footer
- navigation.indexes
- navigation.instant
- navigation.instant.prefetch
- navigation.instant.progress
- navigation.instant.scroll
- navigation.prune
- navigation.sections
- navigation.tabs
- navigation.tabs.sticky
- navigation.top
- navigation.tracking
- search.highlight
- search.share
- search.suggest
- toc.follow
- toc.integrate
palette:
- scheme: default
primary: blue
accent: blue
toggle:
icon: material/brightness-7
name: Switch to dark mode
- scheme: slate
primary: blue
accent: blue
toggle:
icon: material/brightness-4
name: Switch to light mode
font:
text: Roboto
code: Roboto Mono
favicon: assets/favicon.png
logo: assets/logo.png
# Plugins
plugins:
- search:
separator: '[\s\-,:!=\[\]()"/]+|(?!\b)(?=[A-Z][a-z])|\.(?!\d)|&[lg]t;'
- minify:
minify_html: true
- git-revision-date-localized:
enable_creation_date: true
type: datetime
timezone: UTC
- awesome-pages
- glightbox
- mkdocs-video
- social:
cards_layout_options:
font_family: Roboto
# Customization
extra:
analytics:
provider: google
property: !ENV GOOGLE_ANALYTICS_KEY
social:
- icon: fontawesome/brands/github
link: https://github.com/aitbc
- icon: fontawesome/brands/twitter
link: https://twitter.com/aitbc
- icon: fontawesome/brands/discord
link: https://discord.gg/aitbc
version:
provider: mike
default: stable
generator: false
# Extensions
markdown_extensions:
- abbr
- admonition
- attr_list
- def_list
- footnotes
- md_in_html
- toc:
permalink: true
- pymdownx.arithmatex:
generic: true
- pymdownx.betterem:
smart_enable: all
- pymdownx.caret
- pymdownx.details
- pymdownx.emoji:
emoji_generator: !!python/name:material.extensions.emoji.to_svg
emoji_index: !!python/name:material.extensions.emoji.twemoji
- pymdownx.highlight:
anchor_linenums: true
line_spans: __span
pygments_lang_class: true
- pymdownx.inlinehilite
- pymdownx.keys
- pymdownx.magiclink:
repo_url_shorthand: true
user: aitbc
repo: docs
- pymdownx.mark
- pymdownx.smartsymbols
- pymdownx.superfences:
custom_fences:
- name: mermaid
class: mermaid
format: !!python/name:pymdownx.superfences.fence_code_format
- pymdownx.tabbed:
alternate_style: true
- pymdownx.tasklist:
custom_checkbox: true
- pymdownx.tilde
# Navigation
nav:
- Home: index.md
- Getting Started:
- Introduction: getting-started/introduction.md
- Quickstart: getting-started/quickstart.md
- Installation: getting-started/installation.md
- Architecture: getting-started/architecture.md
- User Guide:
- Overview: user-guide/overview.md
- Trade Exchange: trade_exchange.md
- Zero-Knowledge Applications: zk-applications.md
- Creating Jobs: user-guide/creating-jobs.md
- Marketplace: user-guide/marketplace.md
- Explorer: user-guide/explorer.md
- Wallet Management: user-guide/wallet-management.md
- Developer Guide:
- Overview: developer-guide/overview.md
- Setup: developer-guide/setup.md
- API Authentication: developer-guide/api-authentication.md
- SDKs:
- Python SDK: developer-guide/sdks/python.md
- JavaScript SDK: developer-guide/sdks/javascript.md
- Examples: developer-guide/examples.md
- Contributing: developer-guide/contributing.md
- API Reference:
- Coordinator API:
- Overview: api/coordinator/overview.md
- Authentication: api/coordinator/authentication.md
- Endpoints: api/coordinator/endpoints.md
- OpenAPI Spec: api/coordinator/openapi.md
- ZK Applications API:
- Overview: api/zk/overview.md
- Endpoints: api/zk/endpoints.md
- Circuits: api/zk/circuits.md
- OpenAPI Spec: api/zk/openapi.md
- Blockchain Node API:
- Overview: api/blockchain/overview.md
- WebSocket API: api/blockchain/websocket.md
- JSON-RPC API: api/blockchain/jsonrpc.md
- OpenAPI Spec: api/blockchain/openapi.md
- Wallet Daemon API:
- Overview: api/wallet/overview.md
- Endpoints: api/wallet/endpoints.md
- OpenAPI Spec: api/wallet/openapi.md
- Operations:
- Deployment: operations/deployment.md
- Monitoring: operations/monitoring.md
- Security: operations/security.md
- Backup & Restore: operations/backup-restore.md
- Troubleshooting: operations/troubleshooting.md
- Tutorials:
- Building a DApp: tutorials/building-dapp.md
- Mining Setup: tutorials/mining-setup.md
- Running a Node: tutorials/running-node.md
- Integration Examples: tutorials/integration-examples.md
- Resources:
- Glossary: resources/glossary.md
- FAQ: resources/faq.md
- Support: resources/support.md
- Changelog: resources/changelog.md
# Page tree
# NOTE: the page tree is managed by the `awesome-pages` plugin, which is already
# registered in the `plugins:` section above. Declaring a second top-level
# `plugins:` key here would override the earlier list and silently disable the
# search, minify, git-revision-date, glightbox, video, and social plugins.

View File

@@ -1,205 +0,0 @@
# Completed Bootstrap Tasks
## Repository Initialization
- Scaffolded core monorepo directories reflected in `docs/bootstrap/dirs.md`.
- Added top-level config files: `.editorconfig`, `.gitignore`, `LICENSE`, and root `README.md`.
- Created Windsurf workspace metadata under `windsurf/`.
## Documentation
- Authored `docs/roadmap.md` capturing staged development targets.
- Added README placeholders for primary apps under `apps/` to outline purpose and setup notes.
## Coordinator API
- Implemented SQLModel-backed job persistence and service layer in `apps/coordinator-api/src/app/`.
- Wired client, miner, and admin routers to coordinator services (job lifecycle, scheduling, stats).
- Added initial pytest coverage under `apps/coordinator-api/tests/test_jobs.py`.
- Added signed receipt generation, persistence (`Job.receipt`, `JobReceipt` history table), retrieval endpoints, telemetry metrics, and optional coordinator attestations.
- Persisted historical receipts via `JobReceipt`; exposed `/v1/jobs/{job_id}/receipts` endpoint and integrated canonical serialization.
- Documented receipt attestation configuration (`RECEIPT_ATTESTATION_KEY_HEX`) in `docs/run.md` and coordinator README.
## Miner Node
- Created coordinator client, control loop, and capability/backoff utilities in `apps/miner-node/src/aitbc_miner/`.
- Implemented CLI/Python runners and execution pipeline with result reporting.
- Added starter tests for runners in `apps/miner-node/tests/test_runners.py`.
## Blockchain Node
- Added websocket fan-out, disconnect cleanup, and load-test coverage in `apps/blockchain-node/tests/test_websocket.py`, ensuring gossip topics deliver reliably to multiple subscribers.
## Directory Preparation
- Established scaffolds for Python and JavaScript packages in `packages/py/` and `packages/js/`.
- Seeded example project directories under `examples/` for quickstart clients and receipt verification.
- Added `examples/receipts-sign-verify/fetch_and_verify.py` demonstrating coordinator receipt fetching + verification using Python SDK.
## Python SDK
- Created `packages/py/aitbc-sdk/` with coordinator receipt client and verification helpers consuming `aitbc_crypto` utilities.
- Added pytest coverage under `packages/py/aitbc-sdk/tests/test_receipts.py` validating miner/coordinator signature checks and client behavior.
## Wallet Daemon
- Added `apps/wallet-daemon/src/app/receipts/service.py` providing `ReceiptVerifierService` that fetches and validates receipts via `aitbc_sdk`.
- Created unit tests under `apps/wallet-daemon/tests/test_receipts.py` verifying service behavior.
- Implemented wallet SDK receipt ingestion + attestation surfacing in `packages/py/aitbc-sdk/src/receipts.py`, including pagination client, signature verification, and failure diagnostics with full pytest coverage.
- Hardened REST API by wiring dependency overrides in `apps/wallet-daemon/tests/test_wallet_api.py`, expanding workflow coverage (create/list/unlock/sign) and enforcing structured password policy errors consumed in CI.
## Explorer Web
- Initialized a Vite + TypeScript scaffold in `apps/explorer-web/` with `vite.config.ts`, `tsconfig.json`, and placeholder `src/main.ts` content.
- Installed frontend dependencies locally to unblock editor tooling and TypeScript type resolution.
- Implemented `overview` page stats rendering backed by mock block/transaction/receipt fetchers, including robust empty-state handling and TypeScript type fixes.
## Pool Hub
- Implemented FastAPI service scaffolding with Redis/PostgreSQL-backed repositories, match/health/metrics endpoints, and Prometheus instrumentation (`apps/pool-hub/src/poolhub/`).
- Added Alembic migrations (`apps/pool-hub/migrations/`) and async integration tests covering repositories and endpoints (`apps/pool-hub/tests/`).
## Solidity Token
- Implemented attested minting logic in `packages/solidity/aitbc-token/contracts/AIToken.sol` using `AccessControl` role gates and ECDSA signature recovery.
- Added Hardhat unit tests in `packages/solidity/aitbc-token/test/aitoken.test.ts` covering successful minting, replay prevention, and invalid attestor signatures.
- Configured project TypeScript settings via `packages/solidity/aitbc-token/tsconfig.json` to align Hardhat, Node, and Mocha typings for the contract test suite.
## JavaScript SDK
- Delivered fetch-based client wrapper with TypeScript definitions and Vitest coverage under `packages/js/aitbc-sdk/`.
## Blockchain Node Enhancements
- Added comprehensive WebSocket tests for blocks and transactions streams including multi-subscriber and high-volume scenarios.
- Extended PoA consensus with per-proposer block metrics and rotation tracking.
- Added latest block interval gauge and RPC error spike alerting.
- Enhanced observability with Grafana dashboards for blockchain node and coordinator overview.
- Implemented marketplace endpoints in coordinator API with explorer and marketplace routers.
- Added mock coordinator integration with enhanced telemetry capabilities.
- Created comprehensive observability documentation and alerting rules.
## Explorer Web Production Readiness
- Implemented Playwright end-to-end tests for live mode functionality.
- Enhanced responsive design with improved CSS layout system.
- Added comprehensive error handling and fallback mechanisms for live API responses.
- Integrated live coordinator endpoints with proper data reconciliation.
## Marketplace Web Launch
- Completed auth/session scaffolding for marketplace actions.
- Implemented API abstraction layer with mock/live mode toggle.
- Connected mock listings and bids to coordinator data sources.
- Added feature flags for controlled live mode rollout.
## Cross-Chain Settlement
- Implemented cross-chain settlement hooks with external bridges.
- Created BridgeAdapter interface for LayerZero integration.
- Implemented BridgeManager for orchestration and retry logic.
- Added settlement storage and API endpoints.
- Created cross-chain settlement documentation.
## Python SDK Transport Abstraction
- Designed pluggable transport abstraction layer for multi-network support.
- Implemented base Transport interface with HTTP/WebSocket transports.
- Created MultiNetworkClient for managing multiple blockchain networks.
- Updated AITBCClient to use transport abstraction with backward compatibility.
- Added transport documentation and examples.
## GPU Service Provider Configuration
- Extended Miner model to include service configurations.
- Created service configuration API endpoints in pool-hub.
- Built HTML/JS UI for service provider configuration.
- Added service pricing configuration and capability validation.
- Implemented service selection for GPU providers.
## GPU Service Expansion
- Implemented dynamic service registry framework for 30+ GPU services.
- Created service definitions for 6 categories: AI/ML, Media Processing, Scientific Computing, Data Analytics, Gaming, Development Tools.
- Built comprehensive service registry API with validation and discovery.
- Added hardware requirement checking and pricing models.
- Updated roadmap with service expansion phase documentation.
## Stage 7 - GPU Service Expansion & Privacy Features
### GPU Service Infrastructure
- Create dynamic service registry with JSON schema validation
- Implement service provider configuration UI with dynamic service selection
- Create service definitions for AI/ML (LLM inference, image/video generation, speech recognition, computer vision, recommendation systems)
- Create service definitions for Media Processing (video transcoding, streaming, 3D rendering, image/audio processing)
- Create service definitions for Scientific Computing (molecular dynamics, weather modeling, financial modeling, physics simulation, bioinformatics)
- Create service definitions for Data Analytics (big data processing, real-time analytics, graph analytics, time series analysis)
- Create service definitions for Gaming & Entertainment (cloud gaming, asset baking, physics simulation, VR/AR rendering)
- Create service definitions for Development Tools (GPU compilation, model training, data processing, simulation testing, code generation)
- Implement service-specific validation and hardware requirement checking
### Privacy & Cryptography Features
- ✅ Research zk-proof-based receipt attestation and prototype a privacy-preserving settlement flow
- ✅ Implement Groth16 ZK circuit for receipt hash preimage proofs
- ✅ Create ZK proof generation service in coordinator API
- ✅ Implement on-chain verification contract (ZKReceiptVerifier.sol)
- ✅ Add confidential transaction support with opt-in ciphertext storage
- ✅ Implement HSM-backed key management (Azure Key Vault, AWS KMS, Software)
- ✅ Create hybrid encryption system (AES-256-GCM + X25519)
- ✅ Implement role-based access control with time restrictions
- ✅ Create tamper-evident audit logging with chain of hashes
- ✅ Publish comprehensive threat modeling with STRIDE analysis
- ✅ Update cross-chain settlement hooks for ZK proofs and privacy levels
### Enterprise Integration Features
- ✅ Deliver reference connectors for ERP/payment systems with Python SDK
- ✅ Implement Stripe payment connector with full charge/refund/subscription support
- ✅ Create enterprise-grade Python SDK with async support, dependency injection, metrics
- ✅ Build ERP connector base classes with plugin architecture for protocols
- ✅ Document comprehensive SLAs with uptime guarantees and support commitments
- ✅ Stand up multi-tenant coordinator infrastructure with per-tenant isolation
- ✅ Implement tenant management service with lifecycle operations
- ✅ Create tenant context middleware for automatic tenant identification
- ✅ Build resource quota enforcement with Redis-backed caching
- ✅ Create usage tracking and billing metrics with tiered pricing
- ✅ Launch ecosystem certification program with SDK conformance testing
- ✅ Define Bronze/Silver/Gold certification tiers with clear requirements
- ✅ Build language-agnostic test suite with OpenAPI contract validation
- ✅ Implement security validation framework with dependency scanning
- ✅ Design public registry API for partner/SDK discovery
- ✅ Validate certification system with Stripe connector certification
### Community & Governance Features
- ✅ Establish open RFC process with clear stages and review criteria
- ✅ Create governance website with documentation and navigation
- ✅ Set up community call schedule with multiple call types
- ✅ Design RFC template and GitHub PR template for submissions
- ✅ Implement benevolent dictator model with sunset clause
- ✅ Create hybrid governance structure (GitHub + Discord + Website)
- ✅ Document participation guidelines and code of conduct
- ✅ Establish transparency and accountability processes
### Ecosystem Growth Initiatives
- ✅ Create hackathon organization framework with quarterly themes and bounty board
- ✅ Design grant program with hybrid approach (micro-grants + strategic grants)
- ✅ Build marketplace extension SDK with cookiecutter templates
- ✅ Create analytics tooling for ecosystem metrics and KPI tracking
- ✅ Track ecosystem KPIs (active marketplaces, cross-chain volume) and feed them into quarterly strategy reviews
- ✅ Establish judging criteria with ecosystem impact weighting
- ✅ Create sponsor partnership framework with tiered benefits
- ✅ Design retroactive grants for proven projects
- ✅ Implement milestone-based disbursement for accountability
### Stage 8 - Frontier R&D & Global Expansion
- ✅ Launch research consortium framework with governance model and membership tiers
- ✅ Develop hybrid PoA/PoS consensus research plan with 12-month implementation timeline
- ✅ Create scaling research plan for sharding and rollups (100K+ TPS target)
- ✅ Design ZK applications research plan for privacy-preserving AI
- ✅ Create governance research plan with liquid democracy and AI assistance
- ✅ Develop economic models research plan with sustainable tokenomics
- ✅ Implement hybrid consensus prototype demonstrating dynamic mode switching
- ✅ Create executive summary for consortium recruitment
- ✅ Prototype sharding architecture with beacon chain coordination
- ✅ Implement ZK-rollup prototype for transaction batching
- ⏳ Set up consortium legal structure and operational infrastructure
- ⏳ Recruit founding members from industry and academia

View File

@@ -1,458 +0,0 @@
# AITBC Development Roadmap
This roadmap aggregates high-priority tasks derived from the bootstrap specifications in `docs/bootstrap/` and tracks progress across the monorepo. Update this document as milestones evolve.
## Stage 1 — Upcoming Focus Areas [COMPLETED: 2025-12-22]
- **Blockchain Node Foundations**
- ✅ Bootstrap module layout in `apps/blockchain-node/src/`.
- ✅ Implement SQLModel schemas and RPC stubs aligned with historical/attested receipts.
- **Explorer Web Enablement**
- ✅ Finish mock integration across all pages and polish styling + mock/live toggle.
- ✅ Begin wiring coordinator endpoints (e.g., `/v1/jobs/{job_id}/receipts`).
- **Marketplace Web Scaffolding**
- ✅ Scaffold Vite/vanilla frontends consuming coordinator receipt history endpoints and SDK examples.
- **Pool Hub Services**
- ✅ Initialize FastAPI project, scoring registry, and telemetry ingestion hooks leveraging coordinator/miner metrics.
- **CI Enhancements**
- ✅ Add blockchain-node tests once available and frontend build/lint checks to `.github/workflows/python-tests.yml` or follow-on workflows.
- ✅ Provide systemd unit + installer scripts under `scripts/` for streamlined deployment.
## Stage 2 — Core Services (MVP) [COMPLETED: 2025-12-22]
- **Coordinator API**
- ✅ Scaffold FastAPI project (`apps/coordinator-api/src/app/`).
- ✅ Implement job submission, status, result endpoints.
- ✅ Add miner registration, heartbeat, poll, result routes.
- ✅ Wire SQLite persistence for jobs, miners, receipts (historical `JobReceipt` table).
- ✅ Provide `.env.example`, `pyproject.toml`, and run scripts.
- ✅ Deploy minimal version in container with nginx proxy
- **Miner Node**
- ✅ Implement capability probe and control loop (register → heartbeat → fetch jobs).
- ✅ Build CLI and Python runners with sandboxed work dirs (result reporting stubbed to coordinator).
- **Blockchain Node**
- ✅ Define SQLModel schema for blocks, transactions, accounts, receipts (`apps/blockchain-node/src/aitbc_chain/models.py`).
- ✅ Harden schema parity across runtime + storage:
- Alembic baseline + follow-on migrations in `apps/blockchain-node/migrations/` now track the SQLModel schema (blocks, transactions, receipts, accounts).
- Added `Relationship` + `ForeignKey` wiring in `apps/blockchain-node/src/aitbc_chain/models.py` for block ↔ transaction ↔ receipt joins.
- Introduced hex/enum validation hooks via Pydantic validators to ensure hash integrity and safe persistence.
- ✅ Implement PoA proposer loop with block assembly (`apps/blockchain-node/src/aitbc_chain/consensus/poa.py`).
- ✅ Expose REST RPC endpoints for tx submission, balances, receipts (`apps/blockchain-node/src/aitbc_chain/rpc/router.py`).
- ✅ Deliver WebSocket RPC + P2P gossip layer:
- ✅ Stand up WebSocket subscription endpoints (`apps/blockchain-node/src/aitbc_chain/rpc/websocket.py`) mirroring REST payloads.
- ✅ Implement pub/sub transport for block + transaction gossip backed by an in-memory broker (Starlette `Broadcast` or Redis) with configurable fan-out.
- ✅ Add integration tests and load-test harness ensuring gossip convergence and back-pressure handling.
- ✅ Ship devnet scripts (`apps/blockchain-node/scripts/`).
- ✅ Add observability hooks (JSON logging, Prometheus metrics) and integrate coordinator mock into devnet tooling.
- ✅ Expand observability dashboards + miner mock integration:
- Build Grafana dashboards for consensus health (block intervals, proposer rotation) and RPC latency (`apps/blockchain-node/observability/`).
- Expose miner mock telemetry (job throughput, error rates) via shared Prometheus registry and ingest into blockchain-node dashboards.
- Add alerting rules (Prometheus `Alertmanager`) for stalled proposers, queue saturation, and miner mock disconnects.
- Wire coordinator mock into devnet tooling to simulate real-world load and validate observability hooks.
- **Receipt Schema**
- ✅ Finalize canonical JSON receipt format under `protocols/receipts/` (includes sample signed receipts).
- ✅ Implement signing/verification helpers in `packages/py/aitbc-crypto` (JS SDK pending).
- ✅ Translate `docs/bootstrap/aitbc_tech_plan.md` contract skeleton into Solidity project (`packages/solidity/aitbc-token/`).
- ✅ Add deployment/test scripts and document minting flow (`packages/solidity/aitbc-token/scripts/` and `docs/run.md`).
- **Wallet Daemon**
- ✅ Implement encrypted keystore (Argon2id + XChaCha20-Poly1305) via `KeystoreService`.
- ✅ Provide REST and JSON-RPC endpoints for wallet management and signing (`api_rest.py`, `api_jsonrpc.py`).
- ✅ Add mock ledger adapter with SQLite backend powering event history (`ledger_mock/`).
- ✅ Integrate Python receipt verification helpers (`aitbc_sdk`) and expose API/service utilities validating miner + coordinator signatures.
- ✅ Harden REST API workflows (create/list/unlock/sign) with structured password policy enforcement and deterministic pytest coverage in `apps/wallet-daemon/tests/test_wallet_api.py`.
- ✅ Implement Wallet SDK receipt ingestion + attestation surfacing:
- Added `/v1/jobs/{job_id}/receipts` client helpers with cursor pagination, retry/backoff, and summary reporting (`packages/py/aitbc-sdk/src/receipts.py`).
- Reused crypto helpers to validate miner and coordinator signatures, capturing per-key failure reasons for downstream UX.
- Surfaced aggregated attestation status (`ReceiptStatus`) and failure diagnostics for SDK + UI consumers; JS helper parity still planned.
## Stage 3 — Pool Hub & Marketplace [COMPLETED: 2025-12-22]
- **Pool Hub**
- ✅ Implement miner registry, scoring engine, and `/v1/match` API with Redis/PostgreSQL backing stores.
- ✅ Add observability endpoints (`/v1/health`, `/v1/metrics`) plus Prometheus instrumentation and integration tests.
- **Marketplace Web**
- ✅ Initialize Vite project with vanilla TypeScript (`apps/marketplace-web/`).
- ✅ Build offer list, bid form, and stats cards powered by mock data fixtures (`public/mock/`).
- ✅ Provide API abstraction toggling mock/live mode (`src/lib/api.ts`) and wire coordinator endpoints.
- ✅ Validate live mode against coordinator `/v1/marketplace/*` responses and add auth feature flags for rollout.
- ✅ Deploy to production at https://aitbc.bubuit.net/marketplace/
- **Explorer Web**
- ✅ Initialize Vite + TypeScript project scaffold (`apps/explorer-web/`).
- ✅ Add routed pages for overview, blocks, transactions, addresses, receipts.
- ✅ Seed mock datasets (`public/mock/`) and fetch helpers powering overview + blocks tables.
- ✅ Extend mock integrations to transactions, addresses, and receipts pages.
- ✅ Implement styling system, mock/live data toggle, and coordinator API wiring scaffold.
- ✅ Render overview stats from mock block/transaction/receipt summaries with graceful empty-state fallbacks.
- ✅ Validate live mode + responsive polish:
- Hit live coordinator endpoints via nginx (`/api/explorer/blocks`, `/api/explorer/transactions`, `/api/explorer/addresses`, `/api/explorer/receipts`) via `getDataMode() === "live"` and reconcile payloads with UI models.
- Add fallbacks + error surfacing for partial/failed live responses (toast + console diagnostics).
- Audit responsive breakpoints (`public/css/layout.css`) and adjust grid/typography for tablet + mobile; add regression checks in Percy/Playwright snapshots.
- ✅ Deploy to production at https://aitbc.bubuit.net/explorer/ with genesis block display
## Stage 4 — Observability & Production Polish
- **Observability & Telemetry**
- ✅ Build Grafana dashboards for PoA consensus health (block intervals, proposer rotation cadence) leveraging `poa_last_block_interval_seconds`, `poa_proposer_rotations_total`, and per-proposer counters.
- ✅ Surface RPC latency histograms/summaries for critical endpoints (`rpc_get_head`, `rpc_send_tx`, `rpc_submit_receipt`) and add Grafana panels with SLO thresholds.
- ✅ Ingest miner mock telemetry (job throughput, failure rate) into the shared Prometheus registry and wire panels/alerts that correlate miner health with consensus metrics.
- **Explorer Web (Live Mode)**
- ✅ Finalize live `getDataMode() === "live"` workflow: align API payload contracts, render loading/error states, and persist mock/live toggle preference.
- ✅ Expand responsive testing (tablet/mobile) and add automated visual regression snapshots prior to launch.
- ✅ Integrate Playwright smoke tests covering overview, blocks, and transactions pages in live mode.
- **Marketplace Web (Launch Readiness)**
- ✅ Connect mock listings/bids to coordinator data sources and provide feature flags for live mode rollout.
- ✅ Implement auth/session scaffolding for marketplace actions and document API assumptions in `apps/marketplace-web/README.md`.
- ✅ Add Grafana panels monitoring marketplace API throughput and error rates once endpoints are live.
- **Operational Hardening**
- ✅ Extend Alertmanager rules to cover RPC error spikes, proposer stalls, and miner disconnects using the new metrics.
- ✅ Document dashboard import + alert deployment steps in `docs/run.md` for operators.
- ✅ Prepare Stage 3 release checklist linking dashboards, alerts, and smoke tests prior to production cutover.
- ✅ Enable host GPU miner with coordinator proxy routing and systemd-backed coordinator service; add proxy health timer.
## Stage 5 — Scaling & Release Readiness
- **Infrastructure Scaling**
- ✅ Benchmark blockchain node throughput under sustained load; capture CPU/memory targets and suggest horizontal scaling thresholds.
- ✅ Build Terraform/Helm templates for dev/staging/prod environments, including Prometheus/Grafana bundles.
- ✅ Implement autoscaling policies for coordinator, miners, and marketplace services with synthetic traffic tests.
- **Reliability & Compliance**
- ✅ Formalize backup/restore procedures for PostgreSQL, Redis, and ledger storage with scheduled jobs.
- ✅ Complete security hardening review (TLS termination, API auth, secrets management) and document mitigations in `docs/security.md`.
- ✅ Add chaos testing scripts (network partition, coordinator outage) and track mean-time-to-recovery metrics.
- **Product Launch Checklist**
- ✅ Finalize public documentation (API references, onboarding guides) and publish to the docs portal.
- ✅ Coordinate beta release timeline, including user acceptance testing of explorer/marketplace live modes.
- ✅ Establish post-launch monitoring playbooks and on-call rotations.
## Stage 6 — Ecosystem Expansion
- **Cross-Chain & Interop**
- ✅ Prototype cross-chain settlement hooks leveraging external bridges; document integration patterns.
- ✅ Extend SDKs (Python/JS) with pluggable transport abstractions for multi-network support.
- 🔄 Evaluate third-party explorer/analytics integrations and publish partner onboarding guides.
- **Marketplace Growth**
- 🔄 Launch incentive programs (staking, liquidity mining) and expose telemetry dashboards tracking campaign performance.
- 🔄 Implement governance module (proposal voting, parameter changes) and add API/UX flows to explorer/marketplace.
- 🔄 Provide SLA-backed coordinator/pool hubs with capacity planning and billing instrumentation.
- **Developer Experience**
- ✅ Publish advanced tutorials (custom proposers, marketplace extensions) and maintain versioned API docs.
- 🔄 Integrate CI/CD pipelines with canary deployments and blue/green release automation.
- 🔄 Host quarterly architecture reviews capturing lessons learned and feeding into roadmap revisions.
## Stage 7 — Innovation & Ecosystem Services
- **GPU Service Expansion**
- ✅ Implement dynamic service registry framework for 30+ GPU-accelerated services
- ✅ Create service definitions for AI/ML (LLM inference, image/video generation, speech recognition, computer vision, recommendation systems)
- ✅ Create service definitions for Media Processing (video transcoding, streaming, 3D rendering, image/audio processing)
- ✅ Create service definitions for Scientific Computing (molecular dynamics, weather modeling, financial modeling, physics simulation, bioinformatics)
- ✅ Create service definitions for Data Analytics (big data processing, real-time analytics, graph analytics, time series analysis)
- ✅ Create service definitions for Gaming & Entertainment (cloud gaming, asset baking, physics simulation, VR/AR rendering)
- ✅ Create service definitions for Development Tools (GPU compilation, model training, data processing, simulation testing, code generation)
- ✅ Deploy service provider configuration UI with dynamic service selection
- ✅ Implement service-specific validation and hardware requirement checking
- **Advanced Cryptography & Privacy**
- ✅ Research zk-proof-based receipt attestation and prototype a privacy-preserving settlement flow.
- ✅ Add confidential transaction support with opt-in ciphertext storage and HSM-backed key management.
- ✅ Publish threat modeling updates and share mitigations with ecosystem partners.
- **Enterprise Integrations**
- ✅ Deliver reference connectors for ERP/payment systems and document SLA expectations.
- ✅ Stand up multi-tenant coordinator infrastructure with per-tenant isolation and billing metrics.
- ✅ Launch ecosystem certification program (SDK conformance, security best practices) with public registry.
- **Community & Governance**
- ✅ Establish open RFC process, publish governance website, and schedule regular community calls.
- ✅ Sponsor hackathons/accelerators and provide grants for marketplace extensions and analytics tooling.
- ✅ Track ecosystem KPIs (active marketplaces, cross-chain volume) and feed them into quarterly strategy reviews.
## Stage 8 — Frontier R&D & Global Expansion [COMPLETED: 2025-12-28]
- **Protocol Evolution**
- ✅ Launch research consortium exploring next-gen consensus (hybrid PoA/PoS) and finalize whitepapers.
- 🔄 Prototype sharding or rollup architectures to scale throughput beyond current limits.
- 🔄 Standardize interoperability specs with industry bodies and submit proposals for adoption.
- **Global Rollout**
- 🔄 Establish regional infrastructure hubs (multi-cloud) with localized compliance and data residency guarantees.
- 🔄 Partner with regulators/enterprises to pilot regulated marketplaces and publish compliance playbooks.
- 🔄 Expand localization (UI, documentation, support) covering top target markets.
- **Long-Term Sustainability**
- 🔄 Create sustainability fund for ecosystem maintenance, bug bounties, and community stewardship.
- 🔄 Define succession planning for core teams, including training programs and contributor pathways.
- 🔄 Publish bi-annual roadmap retrospectives assessing KPI alignment and revising long-term goals.
## Stage 9 — Moonshot Initiatives [COMPLETED: 2025-12-28]
- **Decentralized Infrastructure**
- 🔄 Transition coordinator/miner roles toward community-governed validator sets with incentive alignment.
- 🔄 Explore decentralized storage/backbone options (IPFS/Filecoin) for ledger and marketplace artifacts.
- 🔄 Prototype fully trustless marketplace settlement leveraging zero-knowledge rollups.
- **AI & Automation**
- 🔄 Integrate AI-driven monitoring/anomaly detection for proposer health, market liquidity, and fraud detection.
- 🔄 Automate incident response playbooks with ChatOps and policy engines.
- 🔄 Launch research into autonomous agent participation (AI agents bidding/offering in the marketplace) and governance implications.
- **Global Standards Leadership**
- 🔄 Chair industry working groups defining receipt/marketplace interoperability standards.
- 🔄 Publish annual transparency reports and sustainability metrics for stakeholders.
- 🔄 Engage with academia and open-source foundations to steward long-term protocol evolution.
## Stage 10 — Stewardship & Legacy Planning [COMPLETED: 2025-12-28]
- **Open Governance Maturity**
- 🔄 Transition roadmap ownership to community-elected councils with transparent voting and treasury controls.
- 🔄 Codify constitutional documents (mission, values, conflict resolution) and publish public charters.
- 🔄 Implement on-chain governance modules for protocol upgrades and ecosystem-wide decisions.
- **Educational & Outreach Programs**
- 🔄 Fund university partnerships, research chairs, and developer fellowships focused on decentralized marketplace tech.
- 🔄 Create certification tracks and mentorship programs for new validator/operators.
- 🔄 Launch annual global summit and publish proceedings to share best practices across partners.
- **Long-Term Preservation**
- 🔄 Archive protocol specs, governance records, and cultural artifacts in decentralized storage with redundancy.
- 🔄 Establish legal/organizational frameworks to ensure continuity across jurisdictions.
- 🔄 Develop end-of-life/transition plans for legacy components, documenting deprecation strategies and migration tooling.
## Shared Libraries & Examples
## Stage 11 — Trade Exchange & Token Economy [COMPLETED: 2025-12-28]
- **Bitcoin Wallet Integration**
- ✅ Implement Bitcoin payment gateway for AITBC token purchases
- ✅ Create payment request API with unique payment addresses
- ✅ Add QR code generation for mobile payments
- ✅ Implement real-time payment monitoring with blockchain API
- ✅ Configure exchange rate: 1 BTC = 100,000 AITBC
- **User Management System**
- ✅ Implement wallet-based authentication with session management
- ✅ Create individual user accounts with unique wallets
- ✅ Add user profile pages with transaction history
- ✅ Implement secure session tokens with 24-hour expiry
- ✅ Add login/logout functionality across all pages
- **Trade Exchange Platform**
- ✅ Build responsive trading interface with real-time price updates
- ✅ Integrate Bitcoin payment flow with QR code display
- ✅ Add payment status monitoring and confirmation handling
- ✅ Implement AITBC token minting upon payment confirmation
- ✅ Deploy to production at https://aitbc.bubuit.net/Exchange/
- **API Infrastructure**
- ✅ Add user management endpoints (/api/users/*)
- ✅ Implement exchange payment endpoints (/api/exchange/*)
- ✅ Add session-based authentication for protected routes
- ✅ Create transaction history and balance tracking APIs
- ✅ Fix all import and syntax errors in coordinator API
## Stage 13 — Explorer Live API & Reverse Proxy Fixes [COMPLETED: 2025-12-28]
- **Explorer Live API**
- ✅ Enable coordinator explorer routes at `/v1/explorer/*`.
- ✅ Expose nginx explorer proxy at `/api/explorer/*` (maps to backend `/v1/explorer/*`).
- ✅ Fix response schema mismatches (e.g., receipts response uses `jobId`).
- **Coordinator API Users/Login**
- ✅ Ensure `/v1/users/login` is registered and working.
- ✅ Fix missing SQLModel tables by initializing DB on startup (wallet/user tables created).
- **nginx Reverse Proxy Hardening**
- ✅ Fix `/api/v1/*` routing to avoid double `/v1` prefix.
- ✅ Add compatibility proxy for Exchange: `/api/users/*` → backend `/v1/users/*`.
## Stage 12 — Zero-Knowledge Proof Implementation [COMPLETED: 2025-12-28]
- **Circom Compiler Setup**
- ✅ Install Circom compiler v2.2.3 on production server
- ✅ Configure Node.js environment for ZK circuit compilation
- ✅ Install circomlib and required dependencies
- **ZK Circuit Development**
- ✅ Create receipt attestation circuit (receipt_simple.circom)
- ✅ Implement membership proof circuit template
- ✅ Implement bid range proof circuit template
- ✅ Compile circuits to R1CS, WASM, and symbolic files
- **Trusted Setup Ceremony**
- ✅ Perform Powers of Tau setup ceremony (2^12)
- ✅ Generate proving keys (zkey) for Groth16
- ✅ Export verification keys for on-chain verification
- ✅ Complete phase 2 preparation with contributions
- **ZK Applications API**
- ✅ Implement identity commitment endpoints
- ✅ Create stealth address generation service
- ✅ Add private receipt attestation API
- ✅ Implement group membership proof verification
- ✅ Add private bidding functionality
- ✅ Create computation proof verification
- ✅ Deploy to production at /api/zk/ endpoints
- **Integration & Deployment**
- ✅ Integrate ZK proof service with coordinator API
- ✅ Configure circuit files in production environment
- ✅ Enable ZK proof generation in coordinator service
- ✅ Update documentation with ZK capabilities
## Stage 14 — Explorer JavaScript Error Fixes [COMPLETED: 2025-12-30]
- **JavaScript Error Resolution**
- ✅ Fixed "can't access property 'length', t is undefined" error on Explorer page load
- ✅ Updated fetchMock function in mockData.ts to return correct structure with 'items' property
- ✅ Added defensive null checks in all page init functions (overview, blocks, transactions, addresses, receipts)
- ✅ Fixed TypeScript errors for null checks and missing properties
- ✅ Deployed fixes to production server (/var/www/aitbc.bubuit.net/explorer/)
- ✅ Configured mock data serving from correct path (/explorer/mock/)
## Stage 15 — Cascade Skills Framework [COMPLETED: 2026-01-19]
- **Skills Infrastructure**
- ✅ Implement Cascade skills framework for complex workflow automation
- ✅ Create skills directory structure at `.windsurf/skills/`
- ✅ Define skill metadata format with YAML frontmatter
- ✅ Add progressive disclosure for intelligent skill invocation
- **Deploy-Production Skill**
- ✅ Create comprehensive deployment workflow skill
- ✅ Implement pre-deployment validation script (disk, memory, services, SSL)
- ✅ Add environment template with all production variables
- ✅ Create rollback procedures with emergency steps
- ✅ Build health check script for post-deployment verification
- **Blockchain-Operations Skill**
- ✅ Create node health monitoring with peer analysis and sync status
- ✅ Implement transaction tracer for debugging and gas optimization
- ✅ Build GPU mining optimization script for NVIDIA/AMD cards
- ✅ Add real-time sync monitor with visual progress bar
- ✅ Create network diagnostics tool with connectivity analysis
- **Skills Integration**
- ✅ Enable automatic skill invocation based on context
- ✅ Add manual skill triggering with keyword detection
- ✅ Implement error handling and logging in all skills
- ✅ Create comprehensive documentation and usage examples
## Stage 16 — Service Maintenance & Optimization [COMPLETED: 2026-01-21]
- **Service Recovery**
- ✅ Diagnose and fix all failing AITBC container services
- ✅ Resolve duplicate service conflicts causing port binding errors
- ✅ Fix marketplace service implementation (missing server.py)
- ✅ Disable redundant services to prevent resource conflicts
- **System Administration**
- ✅ Configure passwordless SSH access for automation
- ✅ Create dedicated SSH keys for secure service management
- ✅ Document service dependencies and port mappings
- ✅ Establish service monitoring procedures
- **Service Status Verification**
- ✅ Verify all 7 core services running correctly
- ✅ Confirm proper nginx reverse proxy configuration
- ✅ Validate API endpoints accessibility
- ✅ Test service recovery procedures
## Stage 17 — Ollama GPU Inference & CLI Tooling [COMPLETED: 2026-01-24]
- **End-to-End Ollama Testing**
- ✅ Verify complete GPU inference workflow from job submission to receipt generation
- ✅ Test Ollama integration with multiple models (llama3.2, mistral, deepseek, etc.)
- ✅ Validate job lifecycle: QUEUED → RUNNING → COMPLETED
- ✅ Confirm receipt generation with accurate payment calculations
- ✅ Record transactions on blockchain with proper metadata
- **Coordinator API Bug Fixes**
- ✅ Fix missing `_coerce_float()` helper function causing 500 errors
- ✅ Deploy fix to production incus container via SSH
- ✅ Verify result submission returns 200 OK with valid receipts
- ✅ Validate receipt payload structure and signature generation
- **Miner Configuration & Optimization**
- ✅ Fix miner ID mismatch (host-gpu-miner → REDACTED_MINER_KEY)
- ✅ Enhance logging with explicit flush handlers for systemd journal
- ✅ Configure unbuffered Python logging environment variables
- ✅ Create systemd service unit with proper environment configuration
- **CLI Tooling Development**
- ✅ Create unified bash CLI wrapper (`scripts/aitbc-cli.sh`)
- ✅ Implement commands: submit, status, browser, blocks, receipts, cancel
- ✅ Add admin commands: admin-miners, admin-jobs, admin-stats
- ✅ Support environment variable overrides for URL and API keys
- ✅ Make script executable and document usage patterns
- **Blockchain-Operations Skill Enhancement**
- ✅ Add comprehensive Ollama testing scenarios to skill
- ✅ Create detailed test documentation (`ollama-test-scenario.md`)
- ✅ Document common issues and troubleshooting procedures
- ✅ Add performance metrics and expected results
- ✅ Include end-to-end automation script template
- **Documentation Updates**
- ✅ Update localhost testing scenario with CLI wrapper usage
- ✅ Convert examples to use localhost URLs (127.0.0.1)
- ✅ Add host user paths and quick start commands
- ✅ Document complete workflow from setup to verification
- ✅ Update skill documentation with testing scenarios
## Stage 18 — Repository Reorganization & CSS Consolidation [COMPLETED: 2026-01-24]
- **Root Level Cleanup**
- ✅ Move 60+ loose files from root to proper directories
- ✅ Organize deployment scripts into `scripts/deploy/`
- ✅ Organize GPU miner files into `scripts/gpu/`
- ✅ Organize test/verify files into `scripts/test/`
- ✅ Organize service management scripts into `scripts/service/`
- ✅ Move systemd services to `systemd/`
- ✅ Move nginx configs to `infra/nginx/`
- ✅ Move dashboards to `website/dashboards/`
- **Website/Docs Folder Structure**
- ✅ Establish `/website/docs/` as source for HTML documentation
- ✅ Create shared CSS file (`css/docs.css`) with 1232 lines
- ✅ Create theme toggle JavaScript (`js/theme.js`)
- ✅ Migrate all HTML files to use external CSS (45-66% size reduction)
- ✅ Clean `/docs/` folder to only contain mkdocs markdown files
- **Documentation Styling Fixes**
- ✅ Fix dark theme background consistency across all docs pages
- ✅ Add dark theme support to `full-documentation.html`
- ✅ Fix Quick Start section cascade styling in docs-miners.html
- ✅ Fix SDK Examples cascade indentation in docs-clients.html
- ✅ Fix malformed `</code-block>` tags across all docs
- ✅ Update API endpoint example to use Python/FastAPI
- **Path Reference Updates**
- ✅ Update systemd service file with new `scripts/gpu/gpu_miner_host.py` path
- ✅ Update skill documentation with new file locations
- ✅ Update localhost-testing-scenario.md with correct paths
- ✅ Update gpu_miner_host_wrapper.sh with new path
- **Repository Maintenance**
- ✅ Expand .gitignore from 39 to 145 lines with organized sections
- ✅ Add project-specific ignore rules for coordinator, explorer, GPU miner
- ✅ Document final folder structure in done.md
Treat this roadmap as the canonical checklist during implementation. Mark completed tasks with ✅ and add dates or links to relevant PRs as development progresses.

View File

@@ -1,27 +0,0 @@
# MkDocs Material Theme
mkdocs-material==9.4.8
mkdocs-material-extensions==1.3.1
# MkDocs Core and Plugins
mkdocs==1.5.3
mkdocs-git-revision-date-localized-plugin==1.2.6
mkdocs-awesome-pages-plugin==2.9.2
mkdocs-minify-plugin==0.7.4
mkdocs-glightbox==0.3.4
mkdocs-video==1.5.0
mkdocs-social-plugin==1.0.0
mkdocs-macros-plugin==1.0.5
# Python Extensions for Markdown
pymdown-extensions==10.8.1
markdown-include==0.8.0
mkdocs-mermaid2-plugin==1.1.1
# Additional dependencies
requests==2.31.0
aiohttp==3.9.1
python-dotenv==1.0.0
# Development dependencies
mkdocs-redirects==1.2.1
mkdocs-monorepo-plugin==1.0.2

View File

@@ -1 +0,0 @@
reference/roadmap.md

600
docs/roadmap.md Normal file
View File

@@ -0,0 +1,600 @@
# AITBC Development Roadmap
This roadmap aggregates high-priority tasks derived from the bootstrap specifications in `docs/bootstrap/` and tracks progress across the monorepo. Update this document as milestones evolve.
## Stage 1 — Upcoming Focus Areas [COMPLETED: 2025-12-22]
- **Blockchain Node Foundations**
- ✅ Bootstrap module layout in `apps/blockchain-node/src/`.
- ✅ Implement SQLModel schemas and RPC stubs aligned with historical/attested receipts.
- **Explorer Web Enablement**
- ✅ Finish mock integration across all pages and polish styling + mock/live toggle.
- ✅ Begin wiring coordinator endpoints (e.g., `/v1/jobs/{job_id}/receipts`).
- **Marketplace Web Scaffolding**
- ✅ Scaffold Vite/vanilla frontends consuming coordinator receipt history endpoints and SDK examples.
- **Pool Hub Services**
- ✅ Initialize FastAPI project, scoring registry, and telemetry ingestion hooks leveraging coordinator/miner metrics.
- **CI Enhancements**
- ✅ Add blockchain-node tests once available and frontend build/lint checks to `.github/workflows/python-tests.yml` or follow-on workflows.
- ✅ Provide systemd unit + installer scripts under `scripts/` for streamlined deployment.
## Stage 2 — Core Services (MVP) [COMPLETED: 2025-12-22]
- **Coordinator API**
- ✅ Scaffold FastAPI project (`apps/coordinator-api/src/app/`).
- ✅ Implement job submission, status, result endpoints.
- ✅ Add miner registration, heartbeat, poll, result routes.
- ✅ Wire SQLite persistence for jobs, miners, receipts (historical `JobReceipt` table).
- ✅ Provide `.env.example`, `pyproject.toml`, and run scripts.
- ✅ Deploy minimal version in container with nginx proxy
- **Miner Node**
- ✅ Implement capability probe and control loop (register → heartbeat → fetch jobs).
- ✅ Build CLI and Python runners with sandboxed work dirs (result reporting stubbed to coordinator).
- **Blockchain Node**
- ✅ Define SQLModel schema for blocks, transactions, accounts, receipts (`apps/blockchain-node/src/aitbc_chain/models.py`).
- ✅ Harden schema parity across runtime + storage:
- Alembic baseline + follow-on migrations in `apps/blockchain-node/migrations/` now track the SQLModel schema (blocks, transactions, receipts, accounts).
- Added `Relationship` + `ForeignKey` wiring in `apps/blockchain-node/src/aitbc_chain/models.py` for block ↔ transaction ↔ receipt joins.
- Introduced hex/enum validation hooks via Pydantic validators to ensure hash integrity and safe persistence.
- ✅ Implement PoA proposer loop with block assembly (`apps/blockchain-node/src/aitbc_chain/consensus/poa.py`).
- ✅ Expose REST RPC endpoints for tx submission, balances, receipts (`apps/blockchain-node/src/aitbc_chain/rpc/router.py`).
- ✅ Deliver WebSocket RPC + P2P gossip layer:
- ✅ Stand up WebSocket subscription endpoints (`apps/blockchain-node/src/aitbc_chain/rpc/websocket.py`) mirroring REST payloads.
- ✅ Implement pub/sub transport for block + transaction gossip backed by an in-memory broker (Starlette `Broadcast` or Redis) with configurable fan-out.
- ✅ Add integration tests and load-test harness ensuring gossip convergence and back-pressure handling.
- ✅ Ship devnet scripts (`apps/blockchain-node/scripts/`).
- ✅ Add observability hooks (JSON logging, Prometheus metrics) and integrate coordinator mock into devnet tooling.
- ✅ Expand observability dashboards + miner mock integration:
- Build Grafana dashboards for consensus health (block intervals, proposer rotation) and RPC latency (`apps/blockchain-node/observability/`).
- Expose miner mock telemetry (job throughput, error rates) via shared Prometheus registry and ingest into blockchain-node dashboards.
- Add alerting rules (Prometheus `Alertmanager`) for stalled proposers, queue saturation, and miner mock disconnects.
- Wire coordinator mock into devnet tooling to simulate real-world load and validate observability hooks.
- **Receipt Schema**
- ✅ Finalize canonical JSON receipt format under `protocols/receipts/` (includes sample signed receipts).
- ✅ Implement signing/verification helpers in `packages/py/aitbc-crypto` (JS SDK pending).
- ✅ Translate `docs/bootstrap/aitbc_tech_plan.md` contract skeleton into Solidity project (`packages/solidity/aitbc-token/`).
- ✅ Add deployment/test scripts and document minting flow (`packages/solidity/aitbc-token/scripts/` and `docs/run.md`).
- **Wallet Daemon**
- ✅ Implement encrypted keystore (Argon2id + XChaCha20-Poly1305) via `KeystoreService`.
- ✅ Provide REST and JSON-RPC endpoints for wallet management and signing (`api_rest.py`, `api_jsonrpc.py`).
- ✅ Add mock ledger adapter with SQLite backend powering event history (`ledger_mock/`).
- ✅ Integrate Python receipt verification helpers (`aitbc_sdk`) and expose API/service utilities validating miner + coordinator signatures.
- ✅ Harden REST API workflows (create/list/unlock/sign) with structured password policy enforcement and deterministic pytest coverage in `apps/wallet-daemon/tests/test_wallet_api.py`.
- ✅ Implement Wallet SDK receipt ingestion + attestation surfacing:
- Added `/v1/jobs/{job_id}/receipts` client helpers with cursor pagination, retry/backoff, and summary reporting (`packages/py/aitbc-sdk/src/receipts.py`).
- Reused crypto helpers to validate miner and coordinator signatures, capturing per-key failure reasons for downstream UX.
- Surfaced aggregated attestation status (`ReceiptStatus`) and failure diagnostics for SDK + UI consumers; JS helper parity still planned.
## Stage 3 — Pool Hub & Marketplace [COMPLETED: 2025-12-22]
- **Pool Hub**
- ✅ Implement miner registry, scoring engine, and `/v1/match` API with Redis/PostgreSQL backing stores.
- ✅ Add observability endpoints (`/v1/health`, `/v1/metrics`) plus Prometheus instrumentation and integration tests.
- **Marketplace Web**
- ✅ Initialize Vite project with vanilla TypeScript (`apps/marketplace-web/`).
- ✅ Build offer list, bid form, and stats cards powered by mock data fixtures (`public/mock/`).
- ✅ Provide API abstraction toggling mock/live mode (`src/lib/api.ts`) and wire coordinator endpoints.
- ✅ Validate live mode against coordinator `/v1/marketplace/*` responses and add auth feature flags for rollout.
- ✅ Deploy to production at https://aitbc.bubuit.net/marketplace/
- **Explorer Web**
- ✅ Initialize Vite + TypeScript project scaffold (`apps/explorer-web/`).
- ✅ Add routed pages for overview, blocks, transactions, addresses, receipts.
- ✅ Seed mock datasets (`public/mock/`) and fetch helpers powering overview + blocks tables.
- ✅ Extend mock integrations to transactions, addresses, and receipts pages.
- ✅ Implement styling system, mock/live data toggle, and coordinator API wiring scaffold.
- ✅ Render overview stats from mock block/transaction/receipt summaries with graceful empty-state fallbacks.
- ✅ Validate live mode + responsive polish:
    - Hit live coordinator endpoints via nginx (`/api/explorer/blocks`, `/api/explorer/transactions`, `/api/explorer/addresses`, `/api/explorer/receipts`) when `getDataMode() === "live"` and reconcile payloads with UI models.
- Add fallbacks + error surfacing for partial/failed live responses (toast + console diagnostics).
- Audit responsive breakpoints (`public/css/layout.css`) and adjust grid/typography for tablet + mobile; add regression checks in Percy/Playwright snapshots.
- ✅ Deploy to production at https://aitbc.bubuit.net/explorer/ with genesis block display
## Stage 4 — Observability & Production Polish
- **Observability & Telemetry**
- ✅ Build Grafana dashboards for PoA consensus health (block intervals, proposer rotation cadence) leveraging `poa_last_block_interval_seconds`, `poa_proposer_rotations_total`, and per-proposer counters.
- ✅ Surface RPC latency histograms/summaries for critical endpoints (`rpc_get_head`, `rpc_send_tx`, `rpc_submit_receipt`) and add Grafana panels with SLO thresholds.
- ✅ Ingest miner mock telemetry (job throughput, failure rate) into the shared Prometheus registry and wire panels/alerts that correlate miner health with consensus metrics.
- **Explorer Web (Live Mode)**
- ✅ Finalize live `getDataMode() === "live"` workflow: align API payload contracts, render loading/error states, and persist mock/live toggle preference.
- ✅ Expand responsive testing (tablet/mobile) and add automated visual regression snapshots prior to launch.
- ✅ Integrate Playwright smoke tests covering overview, blocks, and transactions pages in live mode.
- **Marketplace Web (Launch Readiness)**
- ✅ Connect mock listings/bids to coordinator data sources and provide feature flags for live mode rollout.
- ✅ Implement auth/session scaffolding for marketplace actions and document API assumptions in `apps/marketplace-web/README.md`.
- ✅ Add Grafana panels monitoring marketplace API throughput and error rates once endpoints are live.
- **Operational Hardening**
- ✅ Extend Alertmanager rules to cover RPC error spikes, proposer stalls, and miner disconnects using the new metrics.
- ✅ Document dashboard import + alert deployment steps in `docs/run.md` for operators.
- ✅ Prepare Stage 3 release checklist linking dashboards, alerts, and smoke tests prior to production cutover.
- ✅ Enable host GPU miner with coordinator proxy routing and systemd-backed coordinator service; add proxy health timer.
## Stage 5 — Scaling & Release Readiness
- **Infrastructure Scaling**
- ✅ Benchmark blockchain node throughput under sustained load; capture CPU/memory targets and suggest horizontal scaling thresholds.
- ✅ Build Terraform/Helm templates for dev/staging/prod environments, including Prometheus/Grafana bundles.
- ✅ Implement autoscaling policies for coordinator, miners, and marketplace services with synthetic traffic tests.
- **Reliability & Compliance**
- ✅ Formalize backup/restore procedures for PostgreSQL, Redis, and ledger storage with scheduled jobs.
- ✅ Complete security hardening review (TLS termination, API auth, secrets management) and document mitigations in `docs/security.md`.
- ✅ Add chaos testing scripts (network partition, coordinator outage) and track mean-time-to-recovery metrics.
- **Product Launch Checklist**
- ✅ Finalize public documentation (API references, onboarding guides) and publish to the docs portal.
- ✅ Coordinate beta release timeline, including user acceptance testing of explorer/marketplace live modes.
- ✅ Establish post-launch monitoring playbooks and on-call rotations.
## Stage 6 — Ecosystem Expansion
- **Cross-Chain & Interop**
- ✅ Prototype cross-chain settlement hooks leveraging external bridges; document integration patterns.
- ✅ Extend SDKs (Python/JS) with pluggable transport abstractions for multi-network support.
- 🔄 Evaluate third-party explorer/analytics integrations and publish partner onboarding guides.
- **Marketplace Growth**
- 🔄 Launch incentive programs (staking, liquidity mining) and expose telemetry dashboards tracking campaign performance.
- 🔄 Implement governance module (proposal voting, parameter changes) and add API/UX flows to explorer/marketplace.
- 🔄 Provide SLA-backed coordinator/pool hubs with capacity planning and billing instrumentation.
- **Developer Experience**
- ✅ Publish advanced tutorials (custom proposers, marketplace extensions) and maintain versioned API docs.
- 🔄 Integrate CI/CD pipelines with canary deployments and blue/green release automation.
- 🔄 Host quarterly architecture reviews capturing lessons learned and feeding into roadmap revisions.
## Stage 7 — Innovation & Ecosystem Services
- **GPU Service Expansion**
- ✅ Implement dynamic service registry framework for 30+ GPU-accelerated services
- ✅ Create service definitions for AI/ML (LLM inference, image/video generation, speech recognition, computer vision, recommendation systems)
- ✅ Create service definitions for Media Processing (video transcoding, streaming, 3D rendering, image/audio processing)
- ✅ Create service definitions for Scientific Computing (molecular dynamics, weather modeling, financial modeling, physics simulation, bioinformatics)
- ✅ Create service definitions for Data Analytics (big data processing, real-time analytics, graph analytics, time series analysis)
- ✅ Create service definitions for Gaming & Entertainment (cloud gaming, asset baking, physics simulation, VR/AR rendering)
- ✅ Create service definitions for Development Tools (GPU compilation, model training, data processing, simulation testing, code generation)
- ✅ Deploy service provider configuration UI with dynamic service selection
- ✅ Implement service-specific validation and hardware requirement checking
- **Advanced Cryptography & Privacy**
- ✅ Research zk-proof-based receipt attestation and prototype a privacy-preserving settlement flow.
- ✅ Add confidential transaction support with opt-in ciphertext storage and HSM-backed key management.
- ✅ Publish threat modeling updates and share mitigations with ecosystem partners.
- **Enterprise Integrations**
- ✅ Deliver reference connectors for ERP/payment systems and document SLA expectations.
- ✅ Stand up multi-tenant coordinator infrastructure with per-tenant isolation and billing metrics.
- ✅ Launch ecosystem certification program (SDK conformance, security best practices) with public registry.
- **Community & Governance**
- ✅ Establish open RFC process, publish governance website, and schedule regular community calls.
- ✅ Sponsor hackathons/accelerators and provide grants for marketplace extensions and analytics tooling.
- ✅ Track ecosystem KPIs (active marketplaces, cross-chain volume) and feed them into quarterly strategy reviews.
## Stage 8 — Frontier R&D & Global Expansion [COMPLETED: 2025-12-28]
- **Protocol Evolution**
- ✅ Launch research consortium exploring next-gen consensus (hybrid PoA/PoS) and finalize whitepapers.
- 🔄 Prototype sharding or rollup architectures to scale throughput beyond current limits.
- 🔄 Standardize interoperability specs with industry bodies and submit proposals for adoption.
- **Global Rollout**
- 🔄 Establish regional infrastructure hubs (multi-cloud) with localized compliance and data residency guarantees.
- 🔄 Partner with regulators/enterprises to pilot regulated marketplaces and publish compliance playbooks.
- 🔄 Expand localization (UI, documentation, support) covering top target markets.
- **Long-Term Sustainability**
- 🔄 Create sustainability fund for ecosystem maintenance, bug bounties, and community stewardship.
- 🔄 Define succession planning for core teams, including training programs and contributor pathways.
- 🔄 Publish bi-annual roadmap retrospectives assessing KPI alignment and revising long-term goals.
## Stage 9 — Moonshot Initiatives [COMPLETED: 2025-12-28]
- **Decentralized Infrastructure**
- 🔄 Transition coordinator/miner roles toward community-governed validator sets with incentive alignment.
- 🔄 Explore decentralized storage/backbone options (IPFS/Filecoin) for ledger and marketplace artifacts.
- 🔄 Prototype fully trustless marketplace settlement leveraging zero-knowledge rollups.
- **AI & Automation**
- 🔄 Integrate AI-driven monitoring/anomaly detection for proposer health, market liquidity, and fraud detection.
- 🔄 Automate incident response playbooks with ChatOps and policy engines.
- 🔄 Launch research into autonomous agent participation (AI agents bidding/offering in the marketplace) and governance implications.
- **Global Standards Leadership**
- 🔄 Chair industry working groups defining receipt/marketplace interoperability standards.
- 🔄 Publish annual transparency reports and sustainability metrics for stakeholders.
- 🔄 Engage with academia and open-source foundations to steward long-term protocol evolution.
### Stage 10 — Stewardship & Legacy Planning [COMPLETED: 2025-12-28]
- **Open Governance Maturity**
- 🔄 Transition roadmap ownership to community-elected councils with transparent voting and treasury controls.
- 🔄 Codify constitutional documents (mission, values, conflict resolution) and publish public charters.
- 🔄 Implement on-chain governance modules for protocol upgrades and ecosystem-wide decisions.
- **Educational & Outreach Programs**
- 🔄 Fund university partnerships, research chairs, and developer fellowships focused on decentralized marketplace tech.
- 🔄 Create certification tracks and mentorship programs for new validator/operators.
- 🔄 Launch annual global summit and publish proceedings to share best practices across partners.
- **Long-Term Preservation**
- 🔄 Archive protocol specs, governance records, and cultural artifacts in decentralized storage with redundancy.
- 🔄 Establish legal/organizational frameworks to ensure continuity across jurisdictions.
- 🔄 Develop end-of-life/transition plans for legacy components, documenting deprecation strategies and migration tooling.
## Shared Libraries & Examples
## Stage 11 — Trade Exchange & Token Economy [COMPLETED: 2025-12-28]
- **Bitcoin Wallet Integration**
- ✅ Implement Bitcoin payment gateway for AITBC token purchases
- ✅ Create payment request API with unique payment addresses
- ✅ Add QR code generation for mobile payments
- ✅ Implement real-time payment monitoring with blockchain API
- ✅ Configure exchange rate: 1 BTC = 100,000 AITBC
- **User Management System**
- ✅ Implement wallet-based authentication with session management
- ✅ Create individual user accounts with unique wallets
- ✅ Add user profile pages with transaction history
- ✅ Implement secure session tokens with 24-hour expiry
- ✅ Add login/logout functionality across all pages
- **Trade Exchange Platform**
- ✅ Build responsive trading interface with real-time price updates
- ✅ Integrate Bitcoin payment flow with QR code display
- ✅ Add payment status monitoring and confirmation handling
- ✅ Implement AITBC token minting upon payment confirmation
- ✅ Deploy to production at https://aitbc.bubuit.net/Exchange/
- **API Infrastructure**
- ✅ Add user management endpoints (/api/users/*)
- ✅ Implement exchange payment endpoints (/api/exchange/*)
- ✅ Add session-based authentication for protected routes
- ✅ Create transaction history and balance tracking APIs
- ✅ Fix all import and syntax errors in coordinator API
## Stage 13 — Explorer Live API & Reverse Proxy Fixes [COMPLETED: 2025-12-28]
- **Explorer Live API**
- ✅ Enable coordinator explorer routes at `/v1/explorer/*`.
- ✅ Expose nginx explorer proxy at `/api/explorer/*` (maps to backend `/v1/explorer/*`).
- ✅ Fix response schema mismatches (e.g., receipts response uses `jobId`).
- **Coordinator API Users/Login**
- ✅ Ensure `/v1/users/login` is registered and working.
- ✅ Fix missing SQLModel tables by initializing DB on startup (wallet/user tables created).
- **nginx Reverse Proxy Hardening**
- ✅ Fix `/api/v1/*` routing to avoid double `/v1` prefix.
- ✅ Add compatibility proxy for Exchange: `/api/users/*` → backend `/v1/users/*`.
## Stage 12 — Zero-Knowledge Proof Implementation [COMPLETED: 2025-12-28]
- **Circom Compiler Setup**
- ✅ Install Circom compiler v2.2.3 on production server
- ✅ Configure Node.js environment for ZK circuit compilation
- ✅ Install circomlib and required dependencies
- **ZK Circuit Development**
- ✅ Create receipt attestation circuit (receipt_simple.circom)
- ✅ Implement membership proof circuit template
- ✅ Implement bid range proof circuit template
- ✅ Compile circuits to R1CS, WASM, and symbolic files
- **Trusted Setup Ceremony**
- ✅ Perform Powers of Tau setup ceremony (2^12)
- ✅ Generate proving keys (zkey) for Groth16
- ✅ Export verification keys for on-chain verification
- ✅ Complete phase 2 preparation with contributions
- **ZK Applications API**
- ✅ Implement identity commitment endpoints
- ✅ Create stealth address generation service
- ✅ Add private receipt attestation API
- ✅ Implement group membership proof verification
- ✅ Add private bidding functionality
- ✅ Create computation proof verification
- ✅ Deploy to production at /api/zk/ endpoints
- **Integration & Deployment**
- ✅ Integrate ZK proof service with coordinator API
- ✅ Configure circuit files in production environment
- ✅ Enable ZK proof generation in coordinator service
- ✅ Update documentation with ZK capabilities
## Stage 14 — Explorer JavaScript Error Fixes [COMPLETED: 2025-12-30]
- **JavaScript Error Resolution**
- ✅ Fixed "can't access property 'length', t is undefined" error on Explorer page load
- ✅ Updated fetchMock function in mockData.ts to return correct structure with 'items' property
- ✅ Added defensive null checks in all page init functions (overview, blocks, transactions, addresses, receipts)
- ✅ Fixed TypeScript errors for null checks and missing properties
- ✅ Deployed fixes to production server (/var/www/aitbc.bubuit.net/explorer/)
- ✅ Configured mock data serving from correct path (/explorer/mock/)
## Stage 15 — Cascade Skills Framework [COMPLETED: 2025-01-19]
- **Skills Infrastructure**
- ✅ Implement Cascade skills framework for complex workflow automation
- ✅ Create skills directory structure at `.windsurf/skills/`
- ✅ Define skill metadata format with YAML frontmatter
- ✅ Add progressive disclosure for intelligent skill invocation
- **Deploy-Production Skill**
- ✅ Create comprehensive deployment workflow skill
- ✅ Implement pre-deployment validation script (disk, memory, services, SSL)
- ✅ Add environment template with all production variables
- ✅ Create rollback procedures with emergency steps
- ✅ Build health check script for post-deployment verification
- **Blockchain-Operations Skill**
- ✅ Create node health monitoring with peer analysis and sync status
- ✅ Implement transaction tracer for debugging and gas optimization
- ✅ Build GPU mining optimization script for NVIDIA/AMD cards
- ✅ Add real-time sync monitor with visual progress bar
- ✅ Create network diagnostics tool with connectivity analysis
- **Skills Integration**
- ✅ Enable automatic skill invocation based on context
- ✅ Add manual skill triggering with keyword detection
- ✅ Implement error handling and logging in all skills
- ✅ Create comprehensive documentation and usage examples
## Stage 16 — Service Maintenance & Optimization [COMPLETED: 2026-01-21]
- **Service Recovery**
- ✅ Diagnose and fix all failing AITBC container services
- ✅ Resolve duplicate service conflicts causing port binding errors
- ✅ Fix marketplace service implementation (missing server.py)
- ✅ Disable redundant services to prevent resource conflicts
- **System Administration**
- ✅ Configure passwordless SSH access for automation
- ✅ Create dedicated SSH keys for secure service management
- ✅ Document service dependencies and port mappings
- ✅ Establish service monitoring procedures
- **Service Status Verification**
- ✅ Verify all 7 core services running correctly
- ✅ Confirm proper nginx reverse proxy configuration
- ✅ Validate API endpoints accessibility
- ✅ Test service recovery procedures
## Stage 17 — Ollama GPU Inference & CLI Tooling [COMPLETED: 2026-01-24]
- **End-to-End Ollama Testing**
- ✅ Verify complete GPU inference workflow from job submission to receipt generation
- ✅ Test Ollama integration with multiple models (llama3.2, mistral, deepseek, etc.)
- ✅ Validate job lifecycle: QUEUED → RUNNING → COMPLETED
- ✅ Confirm receipt generation with accurate payment calculations
- ✅ Record transactions on blockchain with proper metadata
- **Coordinator API Bug Fixes**
- ✅ Fix missing `_coerce_float()` helper function causing 500 errors
- ✅ Deploy fix to production incus container via SSH
- ✅ Verify result submission returns 200 OK with valid receipts
- ✅ Validate receipt payload structure and signature generation
- **Miner Configuration & Optimization**
- ✅ Fix miner ID mismatch (host-gpu-miner → REDACTED_MINER_KEY)
- ✅ Enhance logging with explicit flush handlers for systemd journal
- ✅ Configure unbuffered Python logging environment variables
- ✅ Create systemd service unit with proper environment configuration
- **CLI Tooling Development**
- ✅ Create unified bash CLI wrapper (`scripts/aitbc-cli.sh`)
- ✅ Implement commands: submit, status, browser, blocks, receipts, cancel
- ✅ Add admin commands: admin-miners, admin-jobs, admin-stats
- ✅ Support environment variable overrides for URL and API keys
- ✅ Make script executable and document usage patterns
- **Blockchain-Operations Skill Enhancement**
- ✅ Add comprehensive Ollama testing scenarios to skill
- ✅ Create detailed test documentation (`ollama-test-scenario.md`)
- ✅ Document common issues and troubleshooting procedures
- ✅ Add performance metrics and expected results
- ✅ Include end-to-end automation script template
- **Documentation Updates**
- ✅ Update localhost testing scenario with CLI wrapper usage
- ✅ Convert examples to use localhost URLs (127.0.0.1)
- ✅ Add host user paths and quick start commands
- ✅ Document complete workflow from setup to verification
- ✅ Update skill documentation with testing scenarios
## Stage 18 — Repository Reorganization & CSS Consolidation [COMPLETED: 2026-01-24]
- **Root Level Cleanup**
- ✅ Move 60+ loose files from root to proper directories
- ✅ Organize deployment scripts into `scripts/deploy/`
- ✅ Organize GPU miner files into `scripts/gpu/`
- ✅ Organize test/verify files into `scripts/test/`
- ✅ Organize service management scripts into `scripts/service/`
- ✅ Move systemd services to `systemd/`
- ✅ Move nginx configs to `infra/nginx/`
- ✅ Move dashboards to `website/dashboards/`
- **Website/Docs Folder Structure**
- ✅ Establish `/website/docs/` as source for HTML documentation
- ✅ Create shared CSS file (`css/docs.css`) with 1232 lines
- ✅ Create theme toggle JavaScript (`js/theme.js`)
- ✅ Migrate all HTML files to use external CSS (45-66% size reduction)
- ✅ Clean `/docs/` folder to only contain mkdocs markdown files
- **Documentation Styling Fixes**
- ✅ Fix dark theme background consistency across all docs pages
- ✅ Add dark theme support to `full-documentation.html`
- ✅ Fix Quick Start section cascade styling in docs-miners.html
- ✅ Fix SDK Examples cascade indentation in docs-clients.html
- ✅ Fix malformed `</code-block>` tags across all docs
- ✅ Update API endpoint example to use Python/FastAPI
- **Path Reference Updates**
- ✅ Update systemd service file with new `scripts/gpu/gpu_miner_host.py` path
- ✅ Update skill documentation with new file locations
- ✅ Update localhost-testing-scenario.md with correct paths
- ✅ Update gpu_miner_host_wrapper.sh with new path
- **Repository Maintenance**
- ✅ Expand .gitignore from 39 to 145 lines with organized sections
- ✅ Add project-specific ignore rules for coordinator, explorer, GPU miner
- ✅ Document final folder structure in done.md
- ✅ Create `docs/files.md` file audit with whitelist/greylist/blacklist
- ✅ Remove 35 abandoned/duplicate folders and files
- ✅ Reorganize `docs/` folder - root contains only done.md, files.md, roadmap.md
- ✅ Move 25 doc files to appropriate subfolders (components, deployment, migration, etc.)
## Stage 19 — Placeholder Content Development [PLANNED]
Fill the intentional placeholder folders with actual content. Priority order based on user impact.
### Phase 1: Documentation (High Priority)
- **User Guides** (`docs/user/guides/`)
- [ ] Getting started guide for new users
- [ ] Wallet setup and management
- [ ] Job submission workflow
- [ ] Payment and receipt understanding
- [ ] Troubleshooting common issues
- **Developer Tutorials** (`docs/developer/tutorials/`)
- [ ] Building a custom miner
- [ ] Integrating with Coordinator API
- [ ] Creating marketplace extensions
- [ ] Working with ZK proofs
- [ ] SDK usage examples (Python/JS)
- **Reference Specs** (`docs/reference/specs/`)
- [ ] Receipt JSON schema specification
- [ ] API endpoint reference (OpenAPI)
- [ ] Protocol message formats
- [ ] Error codes and handling
### Phase 2: Infrastructure (Medium Priority)
- **Terraform Environments** (`infra/terraform/environments/`)
- [ ] `staging/` - Staging environment config
- [ ] `prod/` - Production environment config
- [ ] Variables and secrets management
- [ ] State backend configuration
- **Helm Chart Values** (`infra/helm/values/`)
- [ ] `dev/` - Development values
- [ ] `staging/` - Staging values
- [ ] `prod/` - Production values
- [ ] Resource limits and scaling policies
### Phase 3: Application Components (Lower Priority)
- **Pool Hub Service** (`apps/pool-hub/src/app/`)
- [ ] `routers/` - API route handlers
- [ ] `registry/` - Miner registry implementation
- [ ] `scoring/` - Scoring engine logic
- **Coordinator Migrations** (`apps/coordinator-api/migrations/`)
- [ ] Initial schema migration
- [ ] Index optimizations
- [ ] Data migration scripts
### Placeholder Filling Schedule
| Folder | Target Date | Owner | Status |
|--------|-------------|-------|--------|
| `docs/user/guides/` | Q1 2026 | Documentation | 🔄 Planned |
| `docs/developer/tutorials/` | Q1 2026 | Documentation | 🔄 Planned |
| `docs/reference/specs/` | Q1 2026 | Documentation | 🔄 Planned |
| `infra/terraform/environments/` | Q2 2026 | DevOps | 🔄 Planned |
| `infra/helm/values/` | Q2 2026 | DevOps | 🔄 Planned |
| `apps/pool-hub/src/app/` | Q2 2026 | Backend | 🔄 Planned |
| `apps/coordinator-api/migrations/` | As needed | Backend | 🔄 Planned |
## Stage 20 — Technical Debt Remediation [PLANNED]
Address known issues in existing components that are blocking production use.
### Blockchain Node (`apps/blockchain-node/`)
Current Status: Has 9 Python files but SQLModel/SQLAlchemy compatibility issues.
- **SQLModel Compatibility**
- [ ] Audit current SQLModel schema definitions in `models.py`
- [ ] Fix relationship and foreign key wiring issues
- [ ] Resolve Alembic migration compatibility
- [ ] Add integration tests for database operations
- [ ] Document schema and migration procedures
- **Production Readiness**
- [ ] Fix PoA consensus loop stability
- [ ] Harden RPC endpoints for production load
- [ ] Add proper error handling and logging
- [ ] Create deployment documentation
### Solidity Token (`packages/solidity/aitbc-token/`)
Current Status: Smart contracts exist but not deployed to mainnet.
- **Contract Audit**
- [ ] Review AIToken.sol and AITokenRegistry.sol
- [ ] Run security analysis (Slither, Mythril)
- [ ] Fix any identified vulnerabilities
- [ ] Add comprehensive test coverage
- **Deployment Preparation**
- [ ] Configure deployment scripts for testnet
- [ ] Deploy to testnet and verify
- [ ] Document deployment process
- [ ] Plan mainnet deployment timeline
### ZK Receipt Verifier (`contracts/ZKReceiptVerifier.sol`)
Current Status: 240-line Groth16 verifier contract ready for deployment.
- **Integration with ZK Circuits**
- [ ] Verify compatibility with deployed `receipt_simple` circuit
- [ ] Test proof generation and verification flow
- [ ] Configure settlement contract integration
- [ ] Add authorized verifier management
- **Deployment**
- [ ] Deploy to testnet with ZK circuits
- [ ] Integration test with Coordinator API
- [ ] Document on-chain verification flow
### Receipt Specification (`docs/reference/specs/receipt-spec.md`)
Current Status: Canonical receipt schema specification moved from `protocols/receipts/`.
- **Specification Finalization**
- [x] Core schema defined (version 1.0)
- [x] Signature format specified (Ed25519)
- [x] Validation rules documented
- [ ] Add multi-signature receipt format
- [ ] Document ZK-proof metadata extension
- [ ] Add Merkle proof anchoring spec
### Technical Debt Schedule
| Component | Priority | Target | Status |
|-----------|----------|--------|--------|
| `apps/blockchain-node/` SQLModel fixes | Medium | Q2 2026 | 🔄 Planned |
| `packages/solidity/aitbc-token/` audit | Low | Q3 2026 | 🔄 Planned |
| `packages/solidity/aitbc-token/` testnet | Low | Q3 2026 | 🔄 Planned |
| `contracts/ZKReceiptVerifier.sol` deploy | Low | Q3 2026 | 🔄 Planned |
| `docs/reference/specs/receipt-spec.md` finalize | Low | Q2 2026 | 🔄 Planned |
Treat this document as the canonical checklist during implementation. Mark completed tasks with ✅ and add dates or links to relevant PRs as development progresses.

View File

@@ -1,49 +0,0 @@
---
title: Creating Jobs
description: Learn how to create and submit AI jobs
---
# Creating Jobs
Jobs are the primary way to execute AI workloads on the AITBC platform.
## Job Types
- **AI Inference**: Run pre-trained models
- **Model Training**: Train new models
- **Data Processing**: Process datasets
- **Custom**: Custom computations
## Job Specification
A job specification includes:
- Model configuration
- Input/output formats
- Resource requirements
- Pricing constraints
## Example
```yaml
name: "image-classification"
type: "ai-inference"
model:
type: "python"
entrypoint: "model.py"
```
## Submitting Jobs
Use the CLI or API to submit jobs:
```bash
aitbc job submit job.yaml
```
## Monitoring
Track job progress through:
- CLI commands
- Web interface
- API endpoints
- WebSocket streams

View File

@@ -1,49 +0,0 @@
---
title: Explorer
description: Using the AITBC blockchain explorer
---
# Explorer
The AITBC explorer allows you to browse and search the blockchain for transactions, jobs, and other activities.
## Features
### Transaction Search
- Search by transaction hash
- Filter by address
- View transaction details
### Job Tracking
- Monitor job status
- View job history
- Analyze performance
### Analytics
- Network statistics
- Volume metrics
- Activity charts
## Using the Explorer
### Web Interface
Visit [https://aitbc.bubuit.net/explorer/](https://aitbc.bubuit.net/explorer/)
### API Access
```bash
# Get transaction
curl https://aitbc.bubuit.net/api/v1/transactions/{tx_hash}
# Get job details
curl https://aitbc.bubuit.net/api/v1/jobs/{job_id}
# Explorer data (blocks)
curl https://aitbc.bubuit.net/api/explorer/blocks
```
## Advanced Features
- Real-time updates
- Custom dashboards
- Data export
- Alert notifications

View File

@@ -1,46 +0,0 @@
---
title: Marketplace
description: Using the AITBC marketplace
---
# Marketplace
The AITBC marketplace connects job creators with miners who can execute their AI workloads.
## How It Works
1. **Job Creation**: Users create jobs with specific requirements
2. **Offer Matching**: The marketplace finds suitable miners
3. **Execution**: Miners execute the jobs and submit results
4. **Payment**: Automatic payment upon successful completion
## Finding Services
Browse available services:
- By job type
- By price range
- By miner reputation
- By resource requirements
## Pricing
Dynamic pricing based on:
- Market demand
- Resource availability
- Miner reputation
- Job complexity
## Creating Offers
As a miner, you can:
- Set your prices
- Specify job types
- Define resource limits
- Build reputation
## Safety Features
- Escrow payments
- Dispute resolution
- Reputation system
- Cryptographic proofs

View File

@@ -1,27 +0,0 @@
---
title: User Guide Overview
description: Learn how to use AITBC as a user
---
# User Guide Overview
Welcome to the AITBC user guide! This section will help you understand how to interact with the AITBC platform.
## What You'll Learn
- Creating and submitting AI jobs
- Using the marketplace
- Managing your wallet
- Monitoring your jobs
- Understanding receipts and proofs
## Getting Started
If you're new to AITBC, start with the [Quickstart Guide](../getting-started/quickstart.md).
## Navigation
- [Creating Jobs](creating-jobs.md) - Learn to submit AI workloads
- [Marketplace](marketplace.md) - Buy and sell AI services
- [Explorer](explorer.md) - Browse the blockchain
- [Wallet Management](wallet-management.md) - Manage your funds

View File

@@ -1,65 +0,0 @@
---
title: Wallet Management
description: Managing your AITBC wallet
---
# Wallet Management
Your AITBC wallet allows you to store, send, and receive AITBC tokens and interact with the platform.
## Creating a Wallet
### New Wallet
```bash
aitbc wallet create
```
### Import Existing
```bash
aitbc wallet import <private_key>
```
## Wallet Operations
### Check Balance
```bash
aitbc wallet balance
```
### Send Tokens
```bash
aitbc wallet send <address> <amount>
```
### Transaction History
```bash
aitbc wallet history
```
## Security
- Never share your private key
- Use a hardware wallet for large amounts
- Enable two-factor authentication
- Keep backups in secure locations
## Staking
Earn rewards by staking your tokens:
```bash
aitbc wallet stake <amount>
```
## Backup
Always backup your wallet:
```bash
aitbc wallet backup --output wallet.backup
```
## Recovery
Restore from backup:
```bash
aitbc wallet restore --input wallet.backup
```

View File

@@ -1,628 +0,0 @@
"""
Ecosystem Analytics Service for AITBC
Tracks and analyzes ecosystem metrics including:
- Hackathon participation and outcomes
- Grant program effectiveness
- Extension adoption and usage
- Developer engagement
- Network effects and cross-chain activity
"""
import asyncio
import json
import logging
import os
from dataclasses import dataclass, asdict
from datetime import datetime, timedelta
from decimal import Decimal
from typing import Dict, List, Any, Optional

import pandas as pd
import plotly.express as px
import plotly.graph_objects as go
from sqlalchemy import create_engine, select, func
from sqlalchemy.orm import sessionmaker
# Configuration - in production, this would come from environment variables or config file
class Settings:
    """Runtime configuration for the analytics service.

    The database DSN can be overridden with the ``AITBC_DATABASE_URL``
    environment variable; the hard-coded value is a development fallback
    only (do not ship real credentials in source).
    """
    DATABASE_URL = os.environ.get(
        "AITBC_DATABASE_URL", "postgresql://user:pass@localhost/aitbc"
    )


settings = Settings()
@dataclass
class EcosystemMetric:
    """Base record for a single ecosystem metric measurement."""
    timestamp: datetime          # when the measurement was taken
    metric_name: str             # identifier of the metric
    value: float                 # measured value
    unit: str                    # unit of `value` (e.g. count, USD, percent)
    dimensions: Dict[str, Any]   # arbitrary key/value breakdown labels (contents not constrained here)
    source: str                  # system the value was collected from
@dataclass
class HackathonMetric:
    """Hackathon-specific metrics for a single event."""
    event_id: str                # event identifier
    event_name: str              # display name of the event
    start_date: datetime         # event start
    end_date: datetime           # event end
    participants: int            # number of active participants
    submissions: int             # number of project submissions
    winners: int                 # number of winning projects
    projects_deployed: int       # submissions that were actually deployed
    github_stars: int            # stars summed over all project repos
    community_engagement: float  # engagement score (see _calculate_community_engagement)
    technical_score: float       # mean technical score across projects
    innovation_score: float      # mean innovation score across projects
@dataclass
class GrantMetric:
    """Grant program metrics for a single grant."""
    grant_id: str                 # grant identifier
    project_name: str             # funded project's name
    amount_awarded: Decimal       # total grant amount awarded
    amount_disbursed: Decimal     # amount paid out so far
    milestones_completed: int     # milestones marked complete
    total_milestones: int         # total planned milestones
    users_acquired: int           # users acquired by the funded project
    github_contributors: int      # contributors on the project repo
    code_commits: int             # commits on the project repo
    documentation_score: float    # docs quality score (see _evaluate_documentation)
    community_score: float        # community health score (see _evaluate_community_health)
@dataclass
class ExtensionMetric:
    """Extension/connector metrics for a single extension."""
    extension_id: str          # extension identifier
    extension_name: str        # display name
    downloads: int             # total downloads
    active_installations: int  # currently active installs
    api_calls: int             # API calls (collected over a 30-day window)
    error_rate: float          # error rate over the same window
    avg_response_time: float   # average response time over the same window
    user_satisfaction: float   # satisfaction score
    integration_count: int     # integrations using this extension
    revenue_generated: Decimal # revenue over the collection window
class EcosystemAnalyticsService:
"""Main analytics service for ecosystem metrics"""
def __init__(self):
    """Create the DB engine, a session factory, and a module-scoped logger."""
    self.engine = create_engine(settings.DATABASE_URL)
    self.Session = sessionmaker(bind=self.engine)
    # Use a normal module-level import instead of the original
    # __import__('logging') hack, which obscured the dependency.
    self.logger = logging.getLogger(__name__)
async def collect_hackathon_metrics(self, event_id: str) -> HackathonMetric:
    """Collect, store and return metrics for one hackathon.

    Args:
        event_id: Identifier of the hackathon event.

    Returns:
        The assembled ``HackathonMetric`` (also passed to ``_store_metric``).

    Raises:
        ValueError: If no event with ``event_id`` exists.
    """
    with self.Session() as db:
        # Get event details
        event = self._get_hackathon_event(db, event_id)
        if not event:
            raise ValueError(f"Hackathon {event_id} not found")
        # Collect participant metrics
        participants = await self._count_participants(event_id)
        submissions = await self._count_submissions(event_id)
        # Get project metrics
        projects = await self._get_hackathon_projects(event_id)
        projects_deployed = sum(1 for p in projects if p.get('deployed', False))
        # Calculate engagement scores
        community_engagement = await self._calculate_community_engagement(event_id)
        technical_scores = [p.get('technical_score', 0) for p in projects]
        innovation_scores = [p.get('innovation_score', 0) for p in projects]
        # Get GitHub metrics (stars summed over all projects)
        github_stars = sum(p.get('github_stars', 0) for p in projects)
        metric = HackathonMetric(
            event_id=event_id,
            event_name=event['name'],
            start_date=event['start_date'],
            end_date=event['end_date'],
            participants=participants,
            submissions=submissions,
            winners=len([p for p in projects if p.get('winner', False)]),
            projects_deployed=projects_deployed,
            github_stars=github_stars,
            community_engagement=community_engagement,
            # Averages guard against an empty project list.
            technical_score=sum(technical_scores) / len(technical_scores) if technical_scores else 0,
            innovation_score=sum(innovation_scores) / len(innovation_scores) if innovation_scores else 0
        )
        # Store metrics
        await self._store_metric(metric)
        return metric
async def collect_grant_metrics(self, grant_id: str) -> GrantMetric:
"""Collect metrics for a specific grant"""
with self.Session() as db:
# Get grant details
grant = self._get_grant_details(db, grant_id)
if not grant:
raise ValueError(f"Grant {grant_id} not found")
# Get project metrics
project = await self._get_grant_project(grant_id)
# Calculate completion metrics
milestones_completed = await self._count_completed_milestones(grant_id)
total_milestones = grant.get('total_milestones', 1)
# Get adoption metrics
users_acquired = await self._count_project_users(grant_id)
github_contributors = await self._count_github_contributors(project.get('repo_url'))
code_commits = await self._count_code_commits(project.get('repo_url'))
# Calculate quality scores
documentation_score = await self._evaluate_documentation(project.get('docs_url'))
community_score = await self._evaluate_community_health(project.get('repo_url'))
metric = GrantMetric(
grant_id=grant_id,
project_name=grant['project_name'],
amount_awarded=Decimal(str(grant.get('amount_awarded', 0))),
amount_disbursed=Decimal(str(grant.get('amount_disbursed', 0))),
milestones_completed=milestones_completed,
total_milestones=total_milestones,
users_acquired=users_acquired,
github_contributors=github_contributors,
code_commits=code_commits,
documentation_score=documentation_score,
community_score=community_score
)
# Store metrics
await self._store_metric(metric)
return metric
async def collect_extension_metrics(self, extension_id: str) -> ExtensionMetric:
"""Collect metrics for a specific extension"""
with self.Session() as db:
# Get extension details
extension = self._get_extension_details(db, extension_id)
if not extension:
raise ValueError(f"Extension {extension_id} not found")
# Get usage metrics
downloads = await self._count_downloads(extension_id)
active_installations = await self._count_active_installations(extension_id)
# Get performance metrics
api_calls = await self._count_api_calls(extension_id, days=30)
error_rate = await self._calculate_error_rate(extension_id, days=30)
avg_response_time = await self._calculate_avg_response_time(extension_id, days=30)
# Get quality metrics
user_satisfaction = await self._calculate_user_satisfaction(extension_id)
integration_count = await self._count_integrations(extension_id)
# Get business metrics
revenue_generated = await self._calculate_revenue(extension_id, days=30)
metric = ExtensionMetric(
extension_id=extension_id,
extension_name=extension['name'],
downloads=downloads,
active_installations=active_installations,
api_calls=api_calls,
error_rate=error_rate,
avg_response_time=avg_response_time,
user_satisfaction=user_satisfaction,
integration_count=integration_count,
revenue_generated=Decimal(str(revenue_generated))
)
# Store metrics
await self._store_metric(metric)
return metric
async def generate_ecosystem_dashboard(self, days: int = 30) -> Dict[str, Any]:
    """Assemble the full ecosystem dashboard for the trailing *days* window."""
    window_end = datetime.utcnow()
    window_start = window_end - timedelta(days=days)
    # Each dashboard section is produced by its dedicated builder coroutine.
    section_builders = {
        "summary": self._generate_summary_metrics,
        "hackathons": self._generate_hackathon_section,
        "grants": self._generate_grant_section,
        "extensions": self._generate_extension_section,
        "network_effects": self._generate_network_effects,
    }
    dashboard: Dict[str, Any] = {
        "period": {
            "start": window_start.isoformat(),
            "end": window_end.isoformat(),
            "days": days,
        }
    }
    for section_name, builder in section_builders.items():
        dashboard[section_name] = await builder(window_start, window_end)
    return dashboard
async def generate_hackathon_report(self, event_id: str) -> Dict[str, Any]:
    """Generate a detailed hackathon report with charts, insights and recommendations.

    Args:
        event_id: Identifier of the hackathon event.

    Returns:
        Dict with the collected metric, plotly figures serialized as JSON,
        and generated insights/recommendations.
    """
    metric = await self.collect_hackathon_metrics(event_id)
    # Generate visualizations
    figures = {}
    # Participation funnel (registrations are estimated at 1.5x participants).
    fig_funnel = go.Figure(go.Funnel(
        y=["Registrations", "Active Participants", "Submissions", "Deployed Projects", "Winners"],
        x=[
            metric.participants * 1.5,  # Estimated registrations
            metric.participants,
            metric.submissions,
            metric.projects_deployed,
            metric.winners
        ]
    ))
    fig_funnel.update_layout(title="Hackathon Participation Funnel")
    figures['funnel'] = fig_funnel.to_json()
    # Score distribution (sample data).
    # BUG FIX: the original passed a list of dicts as the Scatter y-values,
    # which plotly's validator rejects; plot numeric sample scores instead.
    fig_scores = go.Figure()
    fig_scores.add_trace(go.Scatter(
        x=list(range(metric.submissions)),
        y=[75] * metric.submissions,  # Sample technical scores
        mode='markers',
        name='Projects'
    ))
    fig_scores.update_layout(title="Project Scores Distribution")
    figures['scores'] = fig_scores.to_json()
    # Project categories (sample data).
    categories = ['DeFi', 'Enterprise', 'Developer Tools', 'Analytics', 'Other']
    counts = [15, 20, 10, 8, 12]  # Sample data
    fig_categories = px.pie(
        values=counts,
        names=categories,
        title="Project Categories"
    )
    figures['categories'] = fig_categories.to_json()
    report = {
        "event": asdict(metric),
        "figures": figures,
        "insights": await self._generate_hackathon_insights(metric),
        "recommendations": await self._generate_hackathon_recommendations(metric)
    }
    return report
async def generate_grant_impact_report(self, grant_id: str) -> Dict[str, Any]:
"""Generate grant impact report"""
metric = await self.collect_grant_metrics(grant_id)
# Generate ROI analysis
roi_analysis = await self._calculate_grant_roi(metric)
# Generate adoption curve
adoption_data = await self._get_adoption_curve(grant_id)
fig_adoption = px.line(
x=[d['date'] for d in adoption_data],
y=[d['users'] for d in adoption_data],
title="User Adoption Over Time"
)
report = {
"grant": asdict(metric),
"roi_analysis": roi_analysis,
"adoption_chart": fig_adoption.to_json(),
"milestone_progress": {
"completed": metric.milestones_completed,
"total": metric.total_milestones,
"percentage": (metric.milestones_completed / metric.total_milestones * 100) if metric.total_milestones > 0 else 0
},
"quality_metrics": {
"documentation": metric.documentation_score,
"community": metric.community_score,
"overall": (metric.documentation_score + metric.community_score) / 2
}
}
return report
async def export_metrics(self, metric_type: str, format: str = "csv") -> bytes:
    """Export metrics of one type in the given serialization format.

    Args:
        metric_type: One of "hackathons", "grants", "extensions".
        format: One of "csv", "json", "excel".

    Returns:
        The serialized metrics as bytes.

    Raises:
        ValueError: For an unknown metric type or unsupported format.
    """
    # Get metrics data
    if metric_type == "hackathons":
        data = await self._get_all_hackathon_metrics()
    elif metric_type == "grants":
        data = await self._get_all_grant_metrics()
    elif metric_type == "extensions":
        data = await self._get_all_extension_metrics()
    else:
        raise ValueError(f"Unknown metric type: {metric_type}")
    # Convert to DataFrame
    df = pd.DataFrame([asdict(m) for m in data])
    # Export in requested format
    if format == "csv":
        return df.to_csv(index=False).encode('utf-8')
    elif format == "json":
        return df.to_json(orient='records', indent=2).encode('utf-8')
    elif format == "excel":
        # BUG FIX: DataFrame.to_excel writes to a path/buffer and returns
        # None, so the original `.encode()` call raised AttributeError.
        # Write into an in-memory buffer and return its bytes instead.
        import io
        buffer = io.BytesIO()
        df.to_excel(buffer, index=False)
        return buffer.getvalue()
    else:
        raise ValueError(f"Unsupported format: {format}")
# Private helper methods
async def _store_metric(self, metric: Any):
"""Store metric in database"""
# Implementation would store in metrics table
pass
async def _count_participants(self, event_id: str) -> int:
"""Count hackathon participants"""
# Implementation would query participant data
return 150 # Sample
async def _count_submissions(self, event_id: str) -> int:
"""Count hackathon submissions"""
return 45 # Sample
async def _get_hackathon_projects(self, event_id: str) -> List[Dict]:
"""Get all projects from hackathon"""
# Implementation would query project data
return [] # Sample
async def _calculate_community_engagement(self, event_id: str) -> float:
"""Calculate community engagement score"""
return 85.5 # Sample
async def _count_completed_milestones(self, grant_id: str) -> int:
"""Count completed grant milestones"""
return 3 # Sample
async def _count_project_users(self, grant_id: str) -> int:
"""Count users of grant project"""
return 500 # Sample
async def _count_github_contributors(self, repo_url: str) -> int:
"""Count GitHub contributors"""
return 12 # Sample
async def _count_code_commits(self, repo_url: str) -> int:
"""Count code commits"""
return 234 # Sample
async def _evaluate_documentation(self, docs_url: str) -> float:
"""Evaluate documentation quality"""
return 90.0 # Sample
async def _evaluate_community_health(self, repo_url: str) -> float:
"""Evaluate community health"""
return 75.5 # Sample
async def _count_downloads(self, extension_id: str) -> int:
"""Count extension downloads"""
return 1250 # Sample
async def _count_active_installations(self, extension_id: str) -> int:
"""Count active installations"""
return 350 # Sample
async def _count_api_calls(self, extension_id: str, days: int) -> int:
"""Count API calls to extension"""
return 15000 # Sample
async def _calculate_error_rate(self, extension_id: str, days: int) -> float:
"""Calculate error rate"""
return 0.02 # Sample
async def _calculate_avg_response_time(self, extension_id: str, days: int) -> float:
"""Calculate average response time"""
return 125.5 # Sample
async def _calculate_user_satisfaction(self, extension_id: str) -> float:
"""Calculate user satisfaction score"""
return 4.5 # Sample
async def _count_integrations(self, extension_id: str) -> int:
"""Count integrations using extension"""
return 25 # Sample
async def _calculate_revenue(self, extension_id: str, days: int) -> float:
"""Calculate revenue generated"""
return 5000.0 # Sample
async def _generate_summary_metrics(self, start_date: datetime, end_date: datetime) -> Dict:
"""Generate summary metrics for dashboard"""
return {
"total_hackathons": 4,
"total_participants": 600,
"total_grants_awarded": 12,
"total_grant_amount": 500000,
"active_extensions": 25,
"total_downloads": 50000,
"github_stars": 2500,
"community_members": 1500
}
async def _generate_hackathon_section(self, start_date: datetime, end_date: datetime) -> Dict:
"""Generate hackathon section of dashboard"""
return {
"upcoming": [],
"recent": [],
"top_projects": [],
"participation_trend": []
}
async def _generate_grant_section(self, start_date: datetime, end_date: datetime) -> Dict:
"""Generate grant section of dashboard"""
return {
"active_grants": 8,
"completed_grants": 4,
"total_disbursed": 350000,
"roi_average": 2.5,
"success_rate": 0.85
}
async def _generate_extension_section(self, start_date: datetime, end_date: datetime) -> Dict:
"""Generate extension section of dashboard"""
return {
"total_extensions": 25,
"new_extensions": 3,
"most_popular": [],
"growth_rate": 0.15
}
async def _generate_network_effects(self, start_date: datetime, end_date: datetime) -> Dict:
"""Generate network effects analysis"""
return {
"cross_chain_volume": 1000000,
"interoperability_score": 85.5,
"network_value": 25000000,
"metcalfe_coefficient": 1.2
}
async def _generate_hackathon_insights(self, metric: HackathonMetric) -> List[str]:
    """Derive human-readable insights from a hackathon metric.

    Returns a (possibly empty) list of insight sentences.
    """
    insights = []
    # Guard: an event with zero submissions would otherwise divide by zero.
    if metric.submissions and metric.projects_deployed / metric.submissions > 0.5:
        insights.append("High deployment rate indicates strong technical execution")
    if metric.community_engagement > 80:
        insights.append("Excellent community engagement and participation")
    if metric.github_stars > 100:
        insights.append("Strong GitHub community interest")
    return insights
async def _generate_hackathon_recommendations(self, metric: HackathonMetric) -> List[str]:
    """Derive improvement recommendations from a hackathon metric.

    Returns a (possibly empty) list of recommendation sentences.
    """
    recommendations = []
    # Guard: zero submissions would otherwise divide by zero; with no
    # submissions at all, deployment support is still the right advice.
    if metric.submissions == 0 or metric.projects_deployed / metric.submissions < 0.3:
        recommendations.append("Provide more deployment support and infrastructure")
    if metric.technical_score < 70:
        recommendations.append("Offer technical workshops and mentorship")
    if metric.innovation_score < 70:
        recommendations.append("Encourage more innovative and ambitious projects")
    return recommendations
async def _calculate_grant_roi(self, metric: GrantMetric) -> Dict:
    """Estimate the return on investment for a grant.

    Uses a simple heuristic valuation of $100 per acquired user; a grant
    with nothing disbursed yet reports zero ROI and no payback period.
    """
    disbursed = float(metric.amount_disbursed)
    if metric.amount_disbursed == 0:
        return {"roi": 0, "payback_period": None}
    # Simplified ROI calculation: $100 of value per acquired user.
    estimated_value = metric.users_acquired * 100
    roi = (estimated_value - disbursed) / disbursed
    payback = "12 months" if roi > 0 else None
    return {
        "roi": roi,
        "payback_period": payback,
        "value_created": estimated_value
    }
async def _get_adoption_curve(self, grant_id: str) -> List[Dict]:
"""Get user adoption over time"""
# Sample data
return [
{"date": "2024-01-01", "users": 50},
{"date": "2024-02-01", "users": 120},
{"date": "2024-03-01", "users": 200},
{"date": "2024-04-01", "users": 350},
{"date": "2024-05-01", "users": 500}
]
def _get_hackathon_event(self, db, event_id: str) -> Optional[Dict]:
"""Get hackathon event details"""
# Implementation would query database
return {
"name": "DeFi Innovation Hackathon",
"start_date": datetime(2024, 1, 15),
"end_date": datetime(2024, 1, 22)
}
def _get_grant_details(self, db, grant_id: str) -> Optional[Dict]:
"""Get grant details"""
# Implementation would query database
return {
"project_name": "Advanced Analytics Platform",
"amount_awarded": 50000,
"amount_disbursed": 25000,
"total_milestones": 4
}
def _get_extension_details(self, db, extension_id: str) -> Optional[Dict]:
"""Get extension details"""
# Implementation would query database
return {
"name": "SAP ERP Connector"
}
async def _get_grant_project(self, grant_id: str) -> Dict:
"""Get grant project details"""
return {
"repo_url": "https://github.com/example/project",
"docs_url": "https://docs.example.com"
}
async def _get_all_hackathon_metrics(self) -> List[HackathonMetric]:
"""Get all hackathon metrics"""
# Implementation would query database
return []
async def _get_all_grant_metrics(self) -> List[GrantMetric]:
"""Get all grant metrics"""
# Implementation would query database
return []
async def _get_all_extension_metrics(self) -> List[ExtensionMetric]:
"""Get all extension metrics"""
# Implementation would query database
return []
# CLI interface for analytics service
async def main():
    """Command-line entry point for the analytics service.

    Exactly one action runs per invocation: --dashboard, --hackathon,
    --grant or --export; with no flags the help text is printed.
    """
    import argparse
    parser = argparse.ArgumentParser(description="AITBC Ecosystem Analytics")
    parser.add_argument("--dashboard", action="store_true", help="Generate ecosystem dashboard")
    parser.add_argument("--hackathon", help="Generate hackathon report for event ID")
    parser.add_argument("--grant", help="Generate grant impact report for grant ID")
    parser.add_argument("--export", choices=["hackathons", "grants", "extensions"], help="Export metrics")
    parser.add_argument("--format", choices=["csv", "json", "excel"], default="json", help="Export format")
    parser.add_argument("--days", type=int, default=30, help="Number of days for dashboard")
    args = parser.parse_args()
    service = EcosystemAnalyticsService()
    # default=str lets json.dumps serialize datetime/Decimal values.
    if args.dashboard:
        dashboard = await service.generate_ecosystem_dashboard(args.days)
        print(json.dumps(dashboard, indent=2, default=str))
    elif args.hackathon:
        report = await service.generate_hackathon_report(args.hackathon)
        print(json.dumps(report, indent=2, default=str))
    elif args.grant:
        report = await service.generate_grant_impact_report(args.grant)
        print(json.dumps(report, indent=2, default=str))
    elif args.export:
        data = await service.export_metrics(args.export, args.format)
        print(data.decode())
    else:
        parser.print_help()


if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -1,927 +0,0 @@
"""
Ecosystem KPI Tracker for AITBC
Tracks key performance indicators for ecosystem health and strategy reviews
"""
import asyncio
import json
from datetime import datetime, timedelta
from typing import Dict, List, Any, Optional
from dataclasses import dataclass, asdict
from decimal import Decimal
import pandas as pd
import plotly.graph_objects as go
import plotly.express as px
from sqlalchemy import create_engine, select, func, and_, or_
from sqlalchemy.orm import sessionmaker
from enum import Enum
from ..config import settings
from ..database import get_db
class KPICategory(Enum):
    """Categories a tracked KPI can belong to."""
    MARKETPLACE = "marketplace"    # marketplace activity and volume
    CROSS_CHAIN = "cross_chain"    # cross-chain bridges and transfers
    DEVELOPER = "developer"        # developer-ecosystem health
    USER = "user"                  # end-user activity and retention
    FINANCIAL = "financial"        # revenue, cost and margin
    TECHNICAL = "technical"        # network/platform technical health
@dataclass
class KPIDefinition:
    """Static definition and metadata of a tracked KPI."""
    name: str                   # unique KPI identifier
    category: KPICategory       # grouping (see KPICategory)
    description: str            # human-readable explanation
    unit: str                   # unit of the measured value
    target: Optional[float]     # goal value, if one is defined
    calculation_method: str     # name of the tracker method that computes the value
    data_sources: List[str]     # systems the value is derived from
    frequency: str              # daily, weekly, monthly
    importance: str             # high, medium, low
@dataclass
class KPIValue:
    """A single KPI measurement taken at one point in time."""
    timestamp: datetime        # when the value was measured
    kpi_name: str              # matches KPIDefinition.name
    value: float               # measured value
    unit: str                  # copied from the KPI definition
    category: str              # KPICategory value string
    metadata: Dict[str, Any]   # extra context, e.g. target and importance
class EcosystemKPITracker:
"""Main KPI tracking system"""
def __init__(self):
self.engine = create_engine(settings.DATABASE_URL)
self.Session = sessionmaker(bind=self.engine)
self.logger = __import__('logging').getLogger(__name__)
# Define all tracked KPIs
self.kpi_definitions = self._initialize_kpi_definitions()
def _initialize_kpi_definitions(self) -> Dict[str, KPIDefinition]:
"""Initialize all KPI definitions"""
return {
# Marketplace KPIs
"active_marketplaces": KPIDefinition(
name="active_marketplaces",
category=KPICategory.MARKETPLACE,
description="Number of active marketplaces on the platform",
unit="count",
target=50.0,
calculation_method="count_active_marketplaces",
data_sources=["marketplace_service", "tenant_db"],
frequency="daily",
importance="high"
),
"total_volume_usd": KPIDefinition(
name="total_volume_usd",
category=KPICategory.MARKETPLACE,
description="Total transaction volume in USD",
unit="USD",
target=10000000.0,
calculation_method="sum_transaction_volume",
data_sources=["transaction_db", "price_oracle"],
frequency="daily",
importance="high"
),
"marketplace_utilization": KPIDefinition(
name="marketplace_utilization",
category=KPICategory.MARKETPLACE,
description="Percentage of utilized marketplace capacity",
unit="percent",
target=75.0,
calculation_method="calculate_utilization",
data_sources=["marketplace_service", "usage_metrics"],
frequency="hourly",
importance="medium"
),
# Cross-Chain KPIs
"cross_chain_volume": KPIDefinition(
name="cross_chain_volume",
category=KPICategory.CROSS_CHAIN,
description="Total cross-chain transaction volume",
unit="USD",
target=5000000.0,
calculation_method="sum_cross_chain_volume",
data_sources=["bridge_service", "transaction_db"],
frequency="daily",
importance="high"
),
"active_bridges": KPIDefinition(
name="active_bridges",
category=KPICategory.CROSS_CHAIN,
description="Number of active cross-chain bridges",
unit="count",
target=10.0,
calculation_method="count_active_bridges",
data_sources=["bridge_service"],
frequency="daily",
importance="medium"
),
"bridge_success_rate": KPIDefinition(
name="bridge_success_rate",
category=KPICategory.CROSS_CHAIN,
description="Success rate of cross-chain transactions",
unit="percent",
target=95.0,
calculation_method="calculate_bridge_success_rate",
data_sources=["bridge_service", "transaction_db"],
frequency="hourly",
importance="high"
),
# Developer KPIs
"active_developers": KPIDefinition(
name="active_developers",
category=KPICategory.DEVELOPER,
description="Number of active developers in ecosystem",
unit="count",
target=1000.0,
calculation_method="count_active_developers",
data_sources=["github_api", "developer_db"],
frequency="weekly",
importance="high"
),
"new_extensions": KPIDefinition(
name="new_extensions",
category=KPICategory.DEVELOPER,
description="Number of new marketplace extensions created",
unit="count",
target=25.0,
calculation_method="count_new_extensions",
data_sources=["extension_registry", "github_api"],
frequency="weekly",
importance="medium"
),
"developer_satisfaction": KPIDefinition(
name="developer_satisfaction",
category=KPICategory.DEVELOPER,
description="Developer satisfaction score (1-5)",
unit="score",
target=4.5,
calculation_method="calculate_satisfaction_score",
data_sources=["surveys", "github_issues", "discord_sentiment"],
frequency="monthly",
importance="medium"
),
# User KPIs
"active_users": KPIDefinition(
name="active_users",
category=KPICategory.USER,
description="Number of active users (30-day)",
unit="count",
target=10000.0,
calculation_method="count_active_users",
data_sources=["user_db", "auth_service"],
frequency="daily",
importance="high"
),
"user_retention": KPIDefinition(
name="user_retention",
category=KPICategory.USER,
description="30-day user retention rate",
unit="percent",
target=80.0,
calculation_method="calculate_retention_rate",
data_sources=["user_db", "analytics_service"],
frequency="weekly",
importance="high"
),
"net_promoter_score": KPIDefinition(
name="net_promoter_score",
category=KPICategory.USER,
description="Net Promoter Score",
unit="score",
target=50.0,
calculation_method="calculate_nps",
data_sources=["surveys", "feedback_service"],
frequency="monthly",
importance="medium"
),
# Financial KPIs
"revenue": KPIDefinition(
name="revenue",
category=KPICategory.FINANCIAL,
description="Total platform revenue",
unit="USD",
target=1000000.0,
calculation_method="calculate_revenue",
data_sources=["billing_service", "payment_processor"],
frequency="monthly",
importance="high"
),
"cost_per_transaction": KPIDefinition(
name="cost_per_transaction",
category=KPICategory.FINANCIAL,
description="Average cost per transaction",
unit="USD",
target=0.10,
calculation_method="calculate_cost_per_tx",
data_sources=["billing_service", "metrics_service"],
frequency="monthly",
importance="medium"
),
"profit_margin": KPIDefinition(
name="profit_margin",
category=KPICategory.FINANCIAL,
description="Platform profit margin",
unit="percent",
target=20.0,
calculation_method="calculate_profit_margin",
data_sources=["billing_service", "financial_db"],
frequency="quarterly",
importance="high"
),
# Technical KPIs
"network_hash_rate": KPIDefinition(
name="network_hash_rate",
category=KPICategory.TECHNICAL,
description="Network hash rate",
unit="H/s",
target=1000000000.0,
calculation_method="get_hash_rate",
data_sources=["blockchain_node", "metrics_service"],
frequency="hourly",
importance="high"
),
"block_time": KPIDefinition(
name="block_time",
category=KPICategory.TECHNICAL,
description="Average block time",
unit="seconds",
target=12.0,
calculation_method="calculate_average_block_time",
data_sources=["blockchain_node", "block_db"],
frequency="hourly",
importance="high"
),
"uptime": KPIDefinition(
name="uptime",
category=KPICategory.TECHNICAL,
description="Platform uptime percentage",
unit="percent",
target=99.9,
calculation_method="calculate_uptime",
data_sources=["monitoring_service", "health_checks"],
frequency="daily",
importance="high"
),
}
async def collect_all_kpis(self, period: str = "daily") -> List[KPIValue]:
    """Collect every KPI whose frequency matches *period*.

    Args:
        period: Collection frequency to match ("all" collects everything).

    Returns:
        The list of successfully collected KPI values (also stored).
    """
    kpi_values = []
    for kpi_name, kpi_def in self.kpi_definitions.items():
        if kpi_def.frequency == period or period == "all":
            try:
                value = await self._calculate_kpi(kpi_name, kpi_def)
                kpi_value = KPIValue(
                    timestamp=datetime.utcnow(),
                    kpi_name=kpi_name,
                    value=value,
                    unit=kpi_def.unit,
                    category=kpi_def.category.value,
                    metadata={
                        "target": kpi_def.target,
                        "importance": kpi_def.importance,
                    }
                )
                kpi_values.append(kpi_value)
            except Exception as e:
                # Best-effort sweep: one failing KPI must not abort the rest.
                self.logger.error(f"Failed to calculate KPI {kpi_name}: {e}")
    # Store KPIs
    await self._store_kpis(kpi_values)
    return kpi_values
async def _calculate_kpi(self, kpi_name: str, kpi_def: KPIDefinition) -> float:
    """Resolve and run the calculation routine named by *kpi_def*.

    Raises:
        ValueError: If the definition names a method this tracker lacks.
    """
    method_name = kpi_def.calculation_method
    calculator = getattr(self, method_name, None)
    if calculator is None:
        raise ValueError(f"Unknown calculation method: {method_name}")
    return await calculator()
async def _store_kpis(self, kpi_values: List[KPIValue]):
"""Store KPI values in database"""
with self.Session() as db:
for kpi in kpi_values:
# Implementation would store in KPI table
pass
# KPI Calculation Methods
async def count_active_marketplaces(self) -> float:
"""Count active marketplaces"""
with self.Session() as db:
# Query active tenants with marketplace enabled
count = db.execute(
select(func.count(Tenant.id))
.where(
and_(
Tenant.status == "active",
Tenant.features.contains(["marketplace"])
)
)
).scalar()
return float(count)
async def sum_transaction_volume(self) -> float:
"""Sum total transaction volume in USD"""
with self.Session() as db:
# Get transactions in last 24 hours
total = db.execute(
select(func.sum(Transaction.amount_usd))
.where(
Transaction.timestamp >= datetime.utcnow() - timedelta(days=1)
)
).scalar()
return float(total or 0)
async def calculate_utilization(self) -> float:
    """Used marketplace capacity as a percentage of total capacity.

    Returns 0.0 when total capacity is zero (avoids division by zero).
    """
    total = await self._get_total_capacity()
    used = await self._get_used_capacity()
    if total == 0:
        return 0.0
    return used / total * 100
async def sum_cross_chain_volume(self) -> float:
    """Total USD cross-chain volume over the trailing 24 hours.

    Returns 0.0 when there are no cross-chain transactions (SUM yields NULL).
    """
    with self.Session() as db:
        total = db.execute(
            select(func.sum(CrossChainTransaction.amount_usd))
            .where(
                CrossChainTransaction.timestamp >= datetime.utcnow() - timedelta(days=1)
            )
        ).scalar()
        return float(total or 0)
async def count_active_bridges(self) -> float:
    """Number of bridges the bridge service reports as active."""
    active = await self._query_bridge_service("/bridges?status=active")
    return float(len(active))
async def calculate_bridge_success_rate(self) -> float:
    """Percentage of cross-chain transactions completed in the last 24 hours.

    Returns 100.0 when no transactions occurred at all (nothing failed).
    """
    with self.Session() as db:
        # Denominator: all cross-chain transactions in the window.
        total = db.execute(
            select(func.count(CrossChainTransaction.id))
            .where(
                CrossChainTransaction.timestamp >= datetime.utcnow() - timedelta(hours=24)
            )
        ).scalar()
        # Numerator: only those that reached "completed".
        successful = db.execute(
            select(func.count(CrossChainTransaction.id))
            .where(
                and_(
                    CrossChainTransaction.timestamp >= datetime.utcnow() - timedelta(hours=24),
                    CrossChainTransaction.status == "completed"
                )
            )
        ).scalar()
        if total == 0:
            return 100.0
        return (successful / total) * 100
async def count_active_developers(self) -> float:
    """Distinct developers active in the last 30 days.

    Merges GitHub contributors with locally tracked developers and
    deduplicates by username.
    """
    github_contributors = await self._query_github_api("/contributors")
    local_developers = await self._count_local_developers()
    unique_devs = set(github_contributors)
    unique_devs.update(local_developers)
    return float(len(unique_devs))
async def count_new_extensions(self) -> float:
    """Number of extensions created within the last 7 days."""
    with self.Session() as db:
        count = db.execute(
            select(func.count(Extension.id))
            .where(
                Extension.created_at >= datetime.utcnow() - timedelta(weeks=1)
            )
        ).scalar()
        return float(count)
async def calculate_satisfaction_score(self) -> float:
    """Weighted blend of survey, GitHub-issue and Discord sentiment.

    Weights: survey 50%, issue sentiment 25%, Discord sentiment 25%.
    """
    survey = await self._get_survey_scores()
    issues = await self._analyze_issue_sentiment()
    discord = await self._analyze_discord_sentiment()
    return survey * 0.5 + issues * 0.25 + discord * 0.25
async def count_active_users(self) -> float:
    """Number of users active within the last 30 days."""
    with self.Session() as db:
        count = db.execute(
            select(func.count(User.id))
            .where(
                User.last_active >= datetime.utcnow() - timedelta(days=30)
            )
        ).scalar()
        return float(count)
async def calculate_retention_rate(self) -> float:
    """Percentage of the 30-day-old signup cohort that is still active.

    Returns 0.0 for an empty cohort.
    """
    cohort = await self._get_cohort_users(30)  # Users from 30 days ago
    retained = await self._count_retained_users(cohort)
    if not cohort:
        return 0.0
    return retained / len(cohort) * 100
async def calculate_nps(self) -> float:
    """Net Promoter Score: %promoters (>=9) minus %detractors (<=6).

    Returns 0.0 when there are no survey responses.
    """
    responses = await self._get_nps_responses()
    if not responses:
        return 0.0
    promoter_count = len([r for r in responses if r >= 9])
    detractor_count = len([r for r in responses if r <= 6])
    return (promoter_count - detractor_count) / len(responses) * 100
async def calculate_revenue(self) -> float:
    """Total platform revenue from rows flagged period == "monthly".

    NOTE(review): there is no date filter, so this sums every "monthly"
    revenue row ever recorded — confirm the table only holds the current
    period. Returns 0.0 when SUM yields NULL.
    """
    with self.Session() as db:
        total = db.execute(
            select(func.sum(Revenue.amount))
            .where(
                Revenue.period == "monthly"
            )
        ).scalar()
        return float(total or 0)
async def calculate_cost_per_tx(self) -> float:
    """Average operational cost per transaction over the last month.

    Returns 0.0 when there were no transactions.
    """
    monthly_cost = await self._get_monthly_costs()
    monthly_txs = await self._get_monthly_tx_count()
    if monthly_txs == 0:
        return 0.0
    return monthly_cost / monthly_txs
async def calculate_profit_margin(self) -> float:
    """Profit as a percentage of revenue; 0.0 when there is no revenue."""
    revenue = await self.calculate_revenue()
    costs = await self._get_monthly_costs()
    if revenue == 0:
        return 0.0
    return (revenue - costs) / revenue * 100
async def get_hash_rate(self) -> float:
    """Current network hash rate from the blockchain node (0.0 if absent)."""
    node_metrics = await self._query_blockchain_metrics()
    return float(node_metrics.get("hash_rate", 0))
async def calculate_average_block_time(self) -> float:
    """Average block interval over the last hour; 0.0 when no blocks.

    NOTE(review): assumes Block.timestamp_diff stores the interval to the
    previous block — confirm units (seconds?) against the model.
    """
    with self.Session() as db:
        avg_time = db.execute(
            select(func.avg(Block.timestamp_diff))
            .where(
                Block.timestamp >= datetime.utcnow() - timedelta(hours=1)
            )
        ).scalar()
        return float(avg_time or 0)
async def calculate_uptime(self) -> float:
    """Platform uptime percentage from the monitoring service (0.0 if absent)."""
    report = await self._query_monitoring_service("/uptime")
    return float(report.get("uptime_percentage", 0))
# Helper methods for data collection
async def _get_total_capacity(self) -> float:
    """Total marketplace capacity (stubbed sample until the marketplace service is wired up)."""
    # Implementation would query marketplace service
    return 10000.0  # Sample
async def _get_used_capacity(self) -> float:
    """Currently used marketplace capacity (stubbed sample until usage metrics exist)."""
    # Implementation would query usage metrics
    return 7500.0  # Sample
async def _query_bridge_service(self, endpoint: str) -> List[Dict]:
    """GET *endpoint* from the bridge service (stub: always returns no bridges)."""
    # Implementation would make HTTP request
    return []  # Sample
async def _query_github_api(self, endpoint: str) -> List[str]:
    """Query the GitHub API at *endpoint* (stub: returns no contributors)."""
    # Implementation would use GitHub API
    return []  # Sample
async def _count_local_developers(self) -> List[str]:
    """GitHub usernames of locally tracked developers active in the last 30 days.

    NOTE(review): the name says "count" but this returns the usernames;
    callers rely on the list for deduplication, so the return type stays.
    """
    with self.Session() as db:
        developers = db.execute(
            select(Developer.github_username)
            .where(
                Developer.last_active >= datetime.utcnow() - timedelta(days=30)
            )
        ).all()
        # Rows are one-element tuples; unwrap to plain usernames.
        return [d[0] for d in developers]
async def _get_survey_scores(self) -> float:
    """Average developer-survey satisfaction score (stubbed sample value)."""
    # Implementation would query survey service
    return 4.2  # Sample
async def _analyze_issue_sentiment(self) -> float:
    """Sentiment score derived from GitHub issues (stubbed sample value)."""
    # Implementation would use sentiment analysis
    return 3.8  # Sample
async def _analyze_discord_sentiment(self) -> float:
    """Sentiment score derived from Discord messages (stubbed sample value)."""
    # Implementation would use sentiment analysis
    return 4.0  # Sample
async def _get_cohort_users(self, days_ago: int) -> List[str]:
    """User ids whose account was created on the single day *days_ago* days back."""
    with self.Session() as db:
        cohort_date = datetime.utcnow() - timedelta(days=days_ago)
        # One-day window: [cohort_date, cohort_date + 1 day).
        users = db.execute(
            select(User.id)
            .where(
                and_(
                    User.created_at >= cohort_date,
                    User.created_at < cohort_date + timedelta(days=1)
                )
            )
        ).all()
        # Rows are one-element tuples; unwrap to plain ids.
        return [u[0] for u in users]
async def _count_retained_users(self, user_ids: List[str]) -> int:
    """How many of *user_ids* have been active within the last 30 days.

    NOTE(review): an empty user_ids list produces `IN ()`, which some SQL
    dialects warn on; the caller guards against empty cohorts before using
    the result — confirm acceptable here.
    """
    with self.Session() as db:
        count = db.execute(
            select(func.count(User.id))
            .where(
                and_(
                    User.id.in_(user_ids),
                    User.last_active >= datetime.utcnow() - timedelta(days=30)
                )
            )
        ).scalar()
        return count
async def _get_nps_responses(self) -> List[int]:
    """Raw 0-10 NPS survey responses (stubbed sample data)."""
    # Implementation would query survey service
    return [9, 10, 8, 7, 9, 10, 6, 9]  # Sample
async def _get_monthly_costs(self) -> float:
    """Monthly operational costs in USD (stubbed sample value)."""
    # Implementation would query financial service
    return 800000.0  # Sample
async def _get_monthly_tx_count(self) -> int:
    """Number of transactions within the trailing 30 days."""
    with self.Session() as db:
        count = db.execute(
            select(func.count(Transaction.id))
            .where(
                Transaction.timestamp >= datetime.utcnow() - timedelta(days=30)
            )
        ).scalar()
        return count
async def _query_blockchain_metrics(self) -> Dict[str, float]:
    """Metrics from the blockchain node (stub: fixed hash rate)."""
    # Implementation would query blockchain node
    return {"hash_rate": 1000000000.0}  # Sample
async def _query_monitoring_service(self, endpoint: str) -> Dict[str, float]:
    """Query the monitoring service at *endpoint* (stub: fixed uptime)."""
    # Implementation would query monitoring service
    return {"uptime_percentage": 99.95}  # Sample
async def generate_kpi_dashboard(self, period: str = "monthly") -> Dict[str, Any]:
    """Assemble the full dashboard payload: KPIs, health scores, insights, charts.

    NOTE(review): *period* is only echoed into the payload; collection
    always uses "all" — confirm whether per-period dashboards were intended.
    """
    # Collect all KPIs
    kpis = await self.collect_all_kpis("all")
    # Group by category
    by_category = {}
    for kpi in kpis:
        if kpi.category not in by_category:
            by_category[kpi.category] = []
        by_category[kpi.category].append(kpi)
    # Calculate health scores
    health_scores = await self._calculate_health_scores(by_category)
    # Generate insights
    insights = await self._generate_insights(kpis)
    # Create visualizations
    charts = await self._create_charts(kpis)
    return {
        "timestamp": datetime.utcnow().isoformat(),
        "period": period,
        "kpis": [asdict(kpi) for kpi in kpis],
        # The inner comprehension deliberately rebinds `kpis` to each
        # category's own list (comprehensions have their own scope).
        "by_category": {
            cat: [asdict(kpi) for kpi in kpis]
            for cat, kpis in by_category.items()
        },
        "health_scores": health_scores,
        "insights": insights,
        "charts": charts,
    }
async def _calculate_health_scores(self, by_category: Dict[str, List[KPIValue]]) -> Dict[str, float]:
    """Importance-weighted percent-of-target score (capped at 100) per category.

    KPIs without a positive target are ignored; a category with no scorable
    KPIs gets 0.0.
    """
    importance_weights = {"high": 3, "medium": 2, "low": 1}
    scores = {}
    for category, category_kpis in by_category.items():
        weighted_sum = 0.0
        weight_sum = 0.0
        for kpi in category_kpis:
            target = kpi.metadata.get("target", 0)
            if target == 0:
                continue
            # Percent of target, capped so over-achievers don't mask laggards.
            attainment = min((kpi.value / target) * 100, 100)
            weight = importance_weights.get(
                kpi.metadata.get("importance", "medium"), 2
            )
            weighted_sum += attainment * weight
            weight_sum += weight
        scores[category] = weighted_sum / weight_sum if weight_sum > 0 else 0.0
    return scores
async def _generate_insights(self, kpis: List[KPIValue]) -> List[str]:
    """Produce up to 10 human-readable insight strings from the KPI batch."""
    insights = []
    # Flag KPIs more than 20% away from their target in either direction.
    for kpi in kpis:
        target = kpi.metadata.get("target", 0)
        if kpi.value < target * 0.8:
            insights.append(
                f"⚠️ {kpi.kpi_name} is below target ({kpi.value:.2f} vs {kpi.metadata.get('target')})"
            )
        elif kpi.value > target * 1.2:
            insights.append(
                f"🎉 {kpi.kpi_name} exceeds target ({kpi.value:.2f} vs {kpi.metadata.get('target')})"
            )
    # Cross-category insight: high volume paired with low utilization.
    marketplace = [k for k in kpis if k.category == "marketplace"]
    volume = next((k for k in marketplace if k.kpi_name == "total_volume_usd"), None)
    utilization = next((k for k in marketplace if k.kpi_name == "marketplace_utilization"), None)
    if volume is not None and utilization is not None:
        if volume.value > 1000000 and utilization.value < 50:
            insights.append(
                "💡 High volume but low utilization - consider increasing capacity"
            )
    return insights[:10]  # Limit to top 10 insights
async def _create_charts(self, kpis: List[KPIValue]) -> Dict[str, str]:
    """Build plotly chart JSON payloads for the dashboard.

    Returns a mapping of chart name -> plotly figure JSON: one gauge per
    KPI (first five only) plus a bar chart of the average percent-of-target
    per category.
    """
    charts = {}
    # KPI gauge charts
    for kpi in kpis[:5]:  # Limit to top 5
        gauge_target = kpi.metadata.get('target', 100)
        fig = go.Figure(go.Indicator(
            mode = "gauge+number+delta",
            value = kpi.value,
            domain = {'x': [0, 1], 'y': [0, 1]},
            title = {'text': kpi.kpi_name},
            delta = {'reference': kpi.metadata.get('target', 0)},
            gauge = {
                'axis': {'range': [None, gauge_target * 1.5]},
                'bar': {'color': "darkblue"},
                'steps': [
                    {'range': [0, gauge_target * 0.5], 'color': "lightgray"},
                    {'range': [gauge_target * 0.5, gauge_target], 'color': "gray"}
                ],
                'threshold': {
                    'line': {'color': "red", 'width': 4},
                    'thickness': 0.75,
                    'value': gauge_target * 0.9
                }
            }
        ))
        charts[f"gauge_{kpi.kpi_name}"] = fig.to_json()
    # Category comparison chart: average percent-of-target per category.
    categories = {}
    for kpi in kpis:
        # BUGFIX: previously computed kpi.value / (target * 100), which
        # reported a fraction of one-hundredth of the target instead of the
        # advertised "% of Target"; it also divided by zero for target == 0.
        target = kpi.metadata.get('target') or 1  # guard 0/None targets
        categories.setdefault(kpi.category, []).append((kpi.value / target) * 100)
    fig = px.bar(
        x=list(categories.keys()),
        y=[sum(v) / len(v) for v in categories.values()],
        title="KPI Performance by Category",
        labels={"x": "Category", "y": "Average % of Target"}
    )
    charts["category_comparison"] = fig.to_json()
    return charts
async def export_kpis(self, format: str = "csv", period: str = "monthly") -> bytes:
    """Export the KPIs collected for *period* as encoded bytes.

    Args:
        format: One of "csv", "json" or "excel" (xlsx; the excel path
            requires a pandas Excel engine such as openpyxl at runtime).
        period: Passed through to collect_all_kpis().

    Raises:
        ValueError: for an unsupported *format*.
    """
    kpis = await self.collect_all_kpis(period)
    # Convert to DataFrame
    df = pd.DataFrame([asdict(kpi) for kpi in kpis])
    if format == "csv":
        return df.to_csv(index=False).encode('utf-8')
    elif format == "json":
        return df.to_json(orient='records', indent=2).encode('utf-8')
    elif format == "excel":
        # BUGFIX: DataFrame.to_excel writes to a path/buffer and returns
        # None, so the old `df.to_excel(index=False).encode(...)` always
        # raised AttributeError. Write into an in-memory buffer instead.
        import io
        buffer = io.BytesIO()
        df.to_excel(buffer, index=False)
        return buffer.getvalue()
    else:
        raise ValueError(f"Unsupported format: {format}")
async def generate_strategy_review(self, quarter: str) -> Dict[str, Any]:
    """Build the quarterly strategy review document for *quarter*.

    Combines current KPIs with the previous quarter's (when available) into
    summary, achievements, challenges, recommendations and next goals.
    """
    # Get KPI data for the quarter
    kpis = await self.collect_all_kpis("all")
    # Compare with previous quarter
    previous_kpis = await self._get_previous_quarter_kpis(quarter)
    # Generate analysis
    analysis = {
        "quarter": quarter,
        "executive_summary": await self._generate_executive_summary(kpis, previous_kpis),
        "key_achievements": await self._identify_achievements(kpis),
        "challenges": await self._identify_challenges(kpis),
        "recommendations": await self._generate_recommendations(kpis, previous_kpis),
        "next_quarter_goals": await self._set_next_quarter_goals(kpis),
    }
    return analysis
async def _get_previous_quarter_kpis(self, quarter: str) -> List[KPIValue]:
    """KPI values recorded in the quarter before *quarter* (stub: no history yet)."""
    # Implementation would query historical KPI data
    return []  # Sample
async def _generate_executive_summary(self, kpis: List[KPIValue], previous: List[KPIValue]) -> str:
    """Executive summary paragraph (stub: canned text; a real version would compare trends)."""
    # Implementation would analyze KPI trends
    return "Ecosystem shows strong growth with 25% increase in active users and 40% growth in transaction volume."
async def _identify_achievements(self, kpis: List[KPIValue]) -> List[str]:
    """List KPIs that met or beat their configured target.

    KPIs without a truthy target are skipped: previously the target
    defaulted to 0, so every non-negative KPI was reported as an
    "achievement", flooding the review with noise.
    """
    achievements = []
    for kpi in kpis:
        target = kpi.metadata.get("target")
        if not target:
            continue  # no meaningful target configured
        if kpi.value >= target:
            achievements.append(
                f"Exceeded {kpi.kpi_name} target with {kpi.value:.2f} (target: {kpi.metadata.get('target')})"
            )
    return achievements
async def _identify_challenges(self, kpis: List[KPIValue]) -> List[str]:
    """List KPIs running at less than 70% of their target."""
    challenges = []
    for kpi in kpis:
        threshold = kpi.metadata.get("target", 0) * 0.7
        if kpi.value < threshold:
            challenges.append(
                f"{kpi.kpi_name} below target at {kpi.value:.2f} (target: {kpi.metadata.get('target')})"
            )
    return challenges
async def _generate_recommendations(self, kpis: List[KPIValue], previous: List[KPIValue]) -> List[str]:
    """Strategic recommendations (currently a static list; KPI trend analysis is TODO)."""
    recommendations = []
    # Analyze trends and generate recommendations
    recommendations.extend([
        "Focus on improving developer onboarding to increase extension creation",
        "Invest in cross-chain infrastructure to support growing volume",
        "Enhance user retention programs to improve 30-day retention rate",
    ])
    return recommendations
async def _set_next_quarter_goals(self, kpis: List[KPIValue]) -> Dict[str, float]:
    """Propose next-quarter targets at 115% of each KPI's current target.

    Falls back to the KPI's current value when no target is configured.
    """
    return {
        kpi.kpi_name: kpi.metadata.get("target", kpi.value) * 1.15
        for kpi in kpis
    }
# CLI interface
async def main():
    """CLI entry point: parse the flags and run the requested tracker action."""
    import argparse
    parser = argparse.ArgumentParser(description="AITBC Ecosystem KPI Tracker")
    parser.add_argument("--collect", action="store_true", help="Collect all KPIs")
    parser.add_argument("--dashboard", action="store_true", help="Generate KPI dashboard")
    parser.add_argument("--export", choices=["csv", "json", "excel"], help="Export KPIs")
    parser.add_argument("--period", default="daily", help="Period for KPI collection")
    parser.add_argument("--strategy-review", help="Generate strategy review for quarter")
    args = parser.parse_args()

    tracker = EcosystemKPITracker()
    if args.collect:
        collected = await tracker.collect_all_kpis(args.period)
        print(f"Collected {len(collected)} KPIs")
        for kpi in collected:
            print(f"{kpi.kpi_name}: {kpi.value:.2f} {kpi.unit}")
    elif args.dashboard:
        dashboard = await tracker.generate_kpi_dashboard()
        print(json.dumps(dashboard, indent=2, default=str))
    elif args.export:
        exported = await tracker.export_kpis(args.export, args.period)
        print(exported.decode())
    elif args.strategy_review:
        review = await tracker.generate_strategy_review(args.strategy_review)
        print(json.dumps(review, indent=2, default=str))
    else:
        parser.print_help()


if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -1,635 +0,0 @@
openapi: 3.0.3
info:
title: AITBC Ecosystem Registry API
description: Public registry API for certified AITBC partners, SDKs, and integrations
version: 1.0.0
contact:
name: AITBC Ecosystem Team
email: ecosystem@aitbc.io
license:
name: MIT
url: https://opensource.org/licenses/MIT
servers:
- url: https://registry.aitbc.io/api/v1
description: Production server
- url: https://staging-registry.aitbc.io/api/v1
description: Staging server
paths:
/partners:
get:
summary: List certified partners
description: Retrieve a paginated list of all certified partners
tags:
- Partners
parameters:
- name: level
in: query
schema:
type: string
enum: [bronze, silver, gold]
description: Filter by certification level
- name: language
in: query
schema:
type: string
description: Filter by SDK language
- name: category
in: query
schema:
type: string
enum: [payment, erp, analytics, infrastructure]
description: Filter by partner category
- name: status
in: query
schema:
type: string
enum: [active, suspended, expired]
description: Filter by certification status
- name: page
in: query
schema:
type: integer
default: 1
description: Page number
- name: limit
in: query
schema:
type: integer
default: 20
maximum: 100
description: Items per page
responses:
'200':
description: Successful response
content:
application/json:
schema:
type: object
properties:
partners:
type: array
items:
$ref: '#/components/schemas/PartnerSummary'
pagination:
$ref: '#/components/schemas/Pagination'
filters:
type: object
description: Applied filters
/partners/{partnerId}:
get:
summary: Get partner details
description: Retrieve detailed information about a certified partner
tags:
- Partners
parameters:
- name: partnerId
in: path
required: true
schema:
type: string
description: Unique partner identifier
responses:
'200':
description: Successful response
content:
application/json:
schema:
$ref: '#/components/schemas/PartnerDetail'
'404':
$ref: '#/components/responses/NotFound'
/partners/{partnerId}/certification:
get:
summary: Get certification details
description: Retrieve certification information for a partner
tags:
- Certification
parameters:
- name: partnerId
in: path
required: true
schema:
type: string
responses:
'200':
description: Successful response
content:
application/json:
schema:
$ref: '#/components/schemas/Certification'
'404':
$ref: '#/components/responses/NotFound'
/partners/{partnerId}/verify:
get:
summary: Verify certification
description: Verify if a partner's certification is valid
tags:
- Certification
parameters:
- name: partnerId
in: path
required: true
schema:
type: string
responses:
'200':
description: Verification result
content:
application/json:
schema:
type: object
properties:
valid:
type: boolean
level:
type: string
enum: [bronze, silver, gold]
expires_at:
type: string
format: date-time
verification_id:
type: string
/sdks:
get:
summary: List certified SDKs
description: Retrieve a list of all certified SDKs
tags:
- SDKs
parameters:
- name: language
in: query
schema:
type: string
enum: [python, java, javascript, typescript, go, rust]
description: Filter by programming language
- name: version
in: query
schema:
type: string
description: Filter by SDK version
- name: level
in: query
schema:
type: string
enum: [bronze, silver, gold]
description: Filter by certification level
responses:
'200':
description: Successful response
content:
application/json:
schema:
type: object
properties:
sdks:
type: array
items:
$ref: '#/components/schemas/SDKSummary'
/sdks/{sdkId}:
get:
summary: Get SDK details
description: Retrieve detailed information about a certified SDK
tags:
- SDKs
parameters:
- name: sdkId
in: path
required: true
schema:
type: string
responses:
'200':
description: Successful response
content:
application/json:
schema:
$ref: '#/components/schemas/SDKDetail'
/search:
get:
summary: Search registry
description: Search for partners, SDKs, and integrations
tags:
- Search
parameters:
- name: q
in: query
required: true
schema:
type: string
description: Search query
- name: type
in: query
schema:
type: string
enum: [partner, sdk, integration, all]
default: all
description: Search target type
- name: level
in: query
schema:
type: string
enum: [bronze, silver, gold]
description: Filter by certification level
responses:
'200':
description: Search results
content:
application/json:
schema:
type: object
properties:
results:
type: array
items:
$ref: '#/components/schemas/SearchResult'
total:
type: integer
query:
type: string
/stats:
get:
summary: Registry statistics
description: Get overall registry statistics
tags:
- Statistics
responses:
'200':
description: Statistics
content:
application/json:
schema:
$ref: '#/components/schemas/RegistryStats'
/badges/{partnerId}/{level}.svg:
get:
summary: Get certification badge
description: Retrieve SVG badge for certified partner
tags:
- Badges
parameters:
- name: partnerId
in: path
required: true
schema:
type: string
- name: level
in: path
required: true
schema:
type: string
enum: [bronze, silver, gold]
responses:
'200':
description: SVG badge
content:
image/svg+xml:
schema:
type: string
components:
schemas:
PartnerSummary:
type: object
properties:
id:
type: string
description: Unique partner identifier
name:
type: string
description: Partner company name
logo_url:
type: string
description: URL to partner logo
description:
type: string
description: Brief partner description
website:
type: string
format: uri
description: Partner website URL
certification_level:
type: string
enum: [bronze, silver, gold]
description: Current certification level
category:
type: string
enum: [payment, erp, analytics, infrastructure]
description: Partner category
languages:
type: array
items:
type: string
description: Supported programming languages
certified_at:
type: string
format: date-time
description: Certification date
expires_at:
type: string
format: date-time
description: Certification expiration date
PartnerDetail:
allOf:
- $ref: '#/components/schemas/PartnerSummary'
- type: object
properties:
contact_email:
type: string
format: email
description: Contact email
support_url:
type: string
format: uri
description: Support documentation URL
documentation_url:
type: string
format: uri
description: API documentation URL
github_url:
type: string
format: uri
description: GitHub repository URL
integration_count:
type: integer
description: Number of certified integrations
test_results:
type: object
properties:
api_compliance:
type: object
properties:
score:
type: number
minimum: 0
maximum: 100
tests_run:
type: integer
tests_passed:
type: integer
security:
type: object
properties:
score:
type: number
minimum: 0
maximum: 100
vulnerabilities_found:
type: integer
critical_issues:
type: integer
performance:
type: object
properties:
avg_response_time:
type: number
throughput:
type: number
uptime:
type: number
Certification:
type: object
properties:
id:
type: string
description: Certification ID
partner_id:
type: string
description: Partner ID
level:
type: string
enum: [bronze, silver, gold]
description: Certification level
status:
type: string
enum: [active, suspended, expired]
description: Certification status
issued_at:
type: string
format: date-time
description: Issue date
expires_at:
type: string
format: date-time
description: Expiration date
test_results:
type: object
description: Test suite results
security_report:
type: object
description: Security validation report
criteria_met:
type: array
items:
type: string
description: List of certification criteria met
SDKSummary:
type: object
properties:
id:
type: string
description: SDK identifier
name:
type: string
description: SDK name
language:
type: string
description: Programming language
version:
type: string
description: Latest version
partner_id:
type: string
description: Partner ID
partner_name:
type: string
description: Partner name
certification_level:
type: string
enum: [bronze, silver, gold]
download_url:
type: string
format: uri
description: Download URL
documentation_url:
type: string
format: uri
description: Documentation URL
certified_at:
type: string
format: date-time
SDKDetail:
allOf:
- $ref: '#/components/schemas/SDKSummary'
- type: object
properties:
description:
type: string
description: SDK description
repository_url:
type: string
format: uri
description: Source repository URL
package_name:
type: string
description: Package name (pip, npm, maven)
dependencies:
type: array
items:
type: string
description: Key dependencies
supported_versions:
type: array
items:
type: string
description: Supported AITBC API versions
installation_command:
type: string
description: Installation command
quick_start:
type: string
description: Quick start code snippet
SearchResult:
type: object
properties:
type:
type: string
enum: [partner, sdk, integration]
description: Result type
id:
type: string
description: Item ID
name:
type: string
description: Item name
description:
type: string
description: Item description
certification_level:
type: string
enum: [bronze, silver, gold]
url:
type: string
format: uri
description: Item URL
relevance_score:
type: number
description: Search relevance score
Pagination:
type: object
properties:
page:
type: integer
description: Current page
limit:
type: integer
description: Items per page
total:
type: integer
description: Total items
pages:
type: integer
description: Total pages
has_next:
type: boolean
description: Has next page
has_prev:
type: boolean
description: Has previous page
RegistryStats:
type: object
properties:
total_partners:
type: integer
description: Total certified partners
total_sdks:
type: integer
description: Total certified SDKs
certification_breakdown:
type: object
properties:
bronze:
type: integer
silver:
type: integer
gold:
type: integer
language_breakdown:
type: object
additionalProperties:
type: integer
description: Number of SDKs per language
category_breakdown:
type: object
additionalProperties:
type: integer
description: Number of partners per category
last_updated:
type: string
format: date-time
description: Last update timestamp
responses:
NotFound:
description: Resource not found
content:
application/json:
schema:
type: object
properties:
error:
type: string
message:
type: string
BadRequest:
description: Bad request
content:
application/json:
schema:
type: object
properties:
error:
type: string
message:
type: string
details:
type: object
securitySchemes:
ApiKeyAuth:
type: apiKey
in: header
name: X-API-Key
description: API key for authenticated endpoints
security:
- ApiKeyAuth: []
tags:
- name: Partners
description: Partner management and lookup
- name: SDKs
description: SDK information and downloads
- name: Certification
description: Certification verification and details
- name: Search
description: Registry search functionality
- name: Statistics
description: Registry statistics and metrics
- name: Badges
description: Certification badges

View File

@@ -1,55 +0,0 @@
# AITBC SDK Conformance Test Suite
Language-agnostic test suite for validating AITBC SDK implementations against the official API specification.
## Architecture
The test suite uses black-box HTTP API testing to validate SDK compliance:
- **Mock AITBC Server**: Validates requests against OpenAPI spec
- **Test Runners**: Docker containers for each language
- **Test Fixtures**: JSON/YAML test cases
- **Reporting**: Detailed compliance reports
## Quick Start
```bash
# Run Bronze certification tests
docker-compose run python-sdk bronze
# Run Silver certification tests
docker-compose run python-sdk silver
# Run all tests
docker-compose run python-sdk all
```
## Test Structure
```
test-suite/
├── fixtures/ # Test cases (JSON/YAML)
├── runners/ # Language-specific test runners
├── mock-server/ # OpenAPI mock server
├── reports/ # Test results
└── docker-compose.yml
```
## Certification Levels
### Bronze Tests
- API compliance
- Authentication
- Error handling
- Data model validation
### Silver Tests
- Performance benchmarks
- Rate limiting
- Retry logic
- Async support
### Gold Tests
- Enterprise features
- Scalability
- Security compliance
- SLA validation

View File

@@ -1,175 +0,0 @@
#!/usr/bin/env python3
"""
Certify the AITBC Stripe connector as a validation of the certification system
"""
import asyncio
import json
import sys
from pathlib import Path
# Add test suite to path
sys.path.insert(0, str(Path(__file__).parent))
from runners.python.test_runner import ConformanceTestRunner
from security.security_validator import SecurityValidator
async def certify_stripe_connector():
    """Run the full Bronze certification pipeline against the Stripe connector.

    Steps: conformance tests -> security validation -> JSON report -> SVG
    badge -> console summary. Returns True on success, False on failure.

    NOTE(review): the issue/expiry dates and the certification ID are
    hardcoded ("2024-01-15" / "2025-01-15" / CERT-STRIPE-001) — confirm
    whether these should be generated at run time instead.
    """
    print("=" * 60)
    print("AITBC Stripe Connector Certification")
    print("=" * 60)
    # Configuration
    base_url = "http://localhost:8011"  # Mock server
    api_key = "test-api-key"
    sdk_path = Path(__file__).parent.parent.parent / "enterprise-connectors" / "python-sdk"
    # 1. Run conformance tests
    print("\n1. Running SDK Conformance Tests...")
    runner = ConformanceTestRunner(base_url, api_key)
    # Run Bronze tests
    bronze_suite = Path(__file__).parent / "fixtures" / "bronze" / "api-compliance.json"
    bronze_result = await runner.run_suite(str(bronze_suite), "bronze")
    # Check if Bronze passed — Bronze requires a compliance score >= 95%.
    if bronze_result.compliance_score < 95:
        print(f"\n❌ Bronze certification FAILED: {bronze_result.compliance_score:.1f}%")
        return False
    print(f"\n✅ Bronze certification PASSED: {bronze_result.compliance_score:.1f}%")
    # 2. Run security validation
    print("\n2. Running Security Validation...")
    validator = SecurityValidator()
    security_report = validator.validate(str(sdk_path), "bronze")
    print(f"\nSecurity Score: {security_report.score}/100")
    print(f"Issues Found: {len(security_report.issues)}")
    if security_report.blocked:
        print("\n❌ Security validation BLOCKED certification")
        # Only surface the serious findings in console output.
        for issue in security_report.issues:
            if issue.severity in ["critical", "high"]:
                print(f" - {issue.description} ({issue.severity})")
        return False
    print("\n✅ Security validation PASSED")
    # 3. Generate certification report
    print("\n3. Generating Certification Report...")
    certification = {
        "partner": {
            "name": "AITBC",
            "id": "aitbc-official",
            "website": "https://aitbc.io",
            "description": "Official AITBC Python SDK with Stripe connector"
        },
        "sdk": {
            "name": "aitbc-enterprise-python",
            "version": "1.0.0",
            "language": "python",
            "repository": "https://github.com/aitbc/enterprise-connectors"
        },
        "certification": {
            "level": "bronze",
            "issued_at": "2024-01-15T00:00:00Z",
            "expires_at": "2025-01-15T00:00:00Z",
            "id": "CERT-STRIPE-001"
        },
        "test_results": {
            "api_compliance": {
                "score": bronze_result.compliance_score,
                "tests_run": bronze_result.total_tests,
                "tests_passed": bronze_result.passed_tests
            },
            "security": {
                "score": security_report.score,
                "vulnerabilities_found": len(security_report.issues),
                "critical_issues": sum(1 for i in security_report.issues if i.severity == "critical")
            }
        },
        "criteria_met": [
            "Core API compatibility",
            "Authentication support",
            "Error handling standards",
            "Data model compliance",
            "Async support",
            "Basic security practices",
            "Documentation completeness"
        ]
    }
    # Save report
    report_path = Path(__file__).parent / "reports" / "stripe-certification.json"
    report_path.parent.mkdir(exist_ok=True)
    with open(report_path, 'w') as f:
        json.dump(certification, f, indent=2)
    print(f"\n✅ Certification report saved to: {report_path}")
    # 4. Generate badge (shields.io-style two-segment SVG, bronze color #CD7F32)
    print("\n4. Generating Certification Badge...")
    badge_svg = f'''<svg xmlns="http://www.w3.org/2000/svg" width="120" height="20">
<linearGradient id="b" x2="0" y2="100%">
<stop offset="0" stop-color="#bbb" stop-opacity=".1"/>
<stop offset="1" stop-opacity=".1"/>
</linearGradient>
<clipPath id="a">
<rect width="120" height="20" rx="3" fill="#fff"/>
</clipPath>
<g clip-path="url(#a)">
<path fill="#555" d="M0 0h55v20H0z"/>
<path fill="#CD7F32" d="M55 0h65v20H55z"/>
<path fill="url(#b)" d="M0 0h120v20H0z"/>
</g>
<g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="11">
<text x="27.5" y="15" fill="#010101" fill-opacity=".3">AITBC</text>
<text x="27.5" y="14">AITBC</text>
<text x="87.5" y="15" fill="#010101" fill-opacity=".3">Bronze</text>
<text x="87.5" y="14">Bronze</text>
</g>
</svg>'''
    badge_path = Path(__file__).parent / "reports" / "stripe-bronze.svg"
    with open(badge_path, 'w') as f:
        f.write(badge_svg)
    print(f"✅ Badge saved to: {badge_path}")
    # 5. Summary
    print("\n" + "=" * 60)
    print("CERTIFICATION COMPLETE")
    print("=" * 60)
    print(f"Partner: AITBC")
    print(f"SDK: aitbc-enterprise-python (Stripe connector)")
    print(f"Level: Bronze")
    print(f"API Compliance: {bronze_result.compliance_score:.1f}%")
    print(f"Security Score: {security_report.score}/100")
    print(f"Certification ID: CERT-STRIPE-001")
    print(f"Valid Until: 2025-01-15")
    return True
async def main():
    """Entry point: run the Stripe connector certification and exit.

    Exits with status 0 on a successful certification, 1 otherwise.
    """
    ok = await certify_stripe_connector()
    if not ok:
        print("\n❌ Certification failed. Please fix issues before proceeding.")
        sys.exit(1)
    print("\n🎉 Stripe connector successfully certified!")
    print("\nThe certification system is validated and ready for external partners.")
    sys.exit(0)
# Script entry point: drive the async certification flow on a fresh event loop.
# asyncio.run() creates and closes the loop; main() calls sys.exit() itself.
if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -1,264 +0,0 @@
{
"name": "API Compliance Tests",
"level": "bronze",
"description": "Tests for core API compliance",
"tests": [
{
"id": "BR-001",
"name": "Health Check Endpoint",
"description": "Validate health check endpoint returns proper response",
"request": {
"method": "GET",
"path": "/health",
"headers": {
"Accept": "application/json"
}
},
"expected": {
"status": 200,
"headers": {
"Content-Type": "application/json"
},
"body": {
"status": "healthy",
"timestamp": "string",
"version": "string"
}
}
},
{
"id": "BR-002",
"name": "Authentication - Bearer Token",
"description": "Validate bearer token authentication",
"request": {
"method": "GET",
"path": "/api/v1/user/profile",
"headers": {
"Authorization": "Bearer valid-token",
"Accept": "application/json"
}
},
"expected": {
"status": 200,
"headers": {
"Content-Type": "application/json"
},
"body": {
"id": "string",
"email": "string",
"created_at": "string"
}
}
},
{
"id": "BR-003",
"name": "Authentication - Invalid Token",
"description": "Validate proper error for invalid token",
"request": {
"method": "GET",
"path": "/api/v1/user/profile",
"headers": {
"Authorization": "Bearer invalid-token",
"Accept": "application/json"
}
},
"expected": {
"status": 401,
"headers": {
"Content-Type": "application/json"
},
"body": {
"error": "AuthenticationError",
"message": "string"
}
}
},
{
"id": "BR-004",
"name": "Create Job - Valid Request",
"description": "Validate job creation with valid parameters",
"request": {
"method": "POST",
"path": "/api/v1/jobs",
"headers": {
"Authorization": "Bearer valid-token",
"Content-Type": "application/json"
},
"body": {
"service_type": "gpu_compute",
"spec": {
"gpu_type": "A100",
"count": 1,
"duration": 3600
},
"metadata": {
"name": "test-job"
}
}
},
"expected": {
"status": 201,
"headers": {
"Content-Type": "application/json",
"Location": "string"
},
"body": {
"id": "string",
"status": "pending",
"created_at": "string",
"estimated_completion": "string"
}
}
},
{
"id": "BR-005",
"name": "Create Job - Invalid Parameters",
"description": "Validate proper error for invalid job parameters",
"request": {
"method": "POST",
"path": "/api/v1/jobs",
"headers": {
"Authorization": "Bearer valid-token",
"Content-Type": "application/json"
},
"body": {
"service_type": "invalid_service",
"spec": {}
}
},
"expected": {
"status": 400,
"headers": {
"Content-Type": "application/json"
},
"body": {
"error": "ValidationError",
"message": "string",
"details": {
"field": "service_type",
"issue": "string"
}
}
}
},
{
"id": "BR-006",
"name": "Get Job - Valid ID",
"description": "Validate job retrieval with valid ID",
"request": {
"method": "GET",
"path": "/api/v1/jobs/job-123",
"headers": {
"Authorization": "Bearer valid-token",
"Accept": "application/json"
}
},
"expected": {
"status": 200,
"headers": {
"Content-Type": "application/json"
},
"body": {
"id": "string",
"status": "string",
"created_at": "string",
"updated_at": "string",
"spec": "object",
"result": "object|null"
}
}
},
{
"id": "BR-007",
"name": "Get Job - Not Found",
"description": "Validate proper error for non-existent job",
"request": {
"method": "GET",
"path": "/api/v1/jobs/nonexistent",
"headers": {
"Authorization": "Bearer valid-token",
"Accept": "application/json"
}
},
"expected": {
"status": 404,
"headers": {
"Content-Type": "application/json"
},
"body": {
"error": "NotFoundError",
"message": "string"
}
}
},
{
"id": "BR-008",
"name": "List Jobs - With Pagination",
"description": "Validate job listing with pagination",
"request": {
"method": "GET",
"path": "/api/v1/jobs?limit=10&offset=0",
"headers": {
"Authorization": "Bearer valid-token",
"Accept": "application/json"
}
},
"expected": {
"status": 200,
"headers": {
"Content-Type": "application/json"
},
"body": {
"jobs": "array",
"total": "number",
"limit": "number",
"offset": "number",
"has_more": "boolean"
}
}
},
{
"id": "BR-009",
"name": "Error Response Format",
"description": "Validate consistent error response format",
"request": {
"method": "POST",
"path": "/api/v1/invalid-endpoint",
"headers": {
"Authorization": "Bearer valid-token"
}
},
"expected": {
"status": 404,
"headers": {
"Content-Type": "application/json"
},
"body": {
"error": "string",
"message": "string",
"request_id": "string"
}
}
},
{
"id": "BR-010",
"name": "Rate Limit Headers",
"description": "Validate rate limit headers are present",
"request": {
"method": "GET",
"path": "/api/v1/jobs",
"headers": {
"Authorization": "Bearer valid-token"
}
},
"expected": {
"status": 200,
"headers": {
"X-RateLimit-Limit": "string",
"X-RateLimit-Remaining": "string",
"X-RateLimit-Reset": "string"
}
}
}
]
}

Some files were not shown because too many files have changed in this diff Show More