```
chore: remove obsolete files and add Solidity build artifacts to .gitignore - Add ignore patterns for Solidity build artifacts (typechain-types, artifacts, cache) - Remove unused exchange mock API server (api/exchange_mock_api.py) - Remove obsolete client-web README placeholder - Remove deprecated marketplace-ui HTML implementation ```
This commit is contained in:
@@ -1,9 +0,0 @@
|
||||
# Client Web
|
||||
|
||||
## Purpose & Scope
|
||||
|
||||
Front-end application that allows users to submit compute jobs, monitor status, and interact with AITBC services. See `docs/bootstrap/dirs.md` and `docs/bootstrap/examples.md` for guidance.
|
||||
|
||||
## Development Setup
|
||||
|
||||
Implementation pending. Recommended stack: lightweight web framework (per bootstrap doc) without heavy front-end frameworks.
|
||||
@@ -1,491 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>AITBC Marketplace - GPU Compute Trading</title>
|
||||
<base href="/Marketplace/">
|
||||
<link rel="stylesheet" href="/assets/css/aitbc.css">
|
||||
<script src="/assets/js/axios.min.js"></script>
|
||||
<script src="/assets/js/lucide.js"></script>
|
||||
<style>
|
||||
.gradient-bg {
|
||||
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
|
||||
}
|
||||
.card-hover {
|
||||
transition: all 0.3s ease;
|
||||
}
|
||||
.card-hover:hover {
|
||||
transform: translateY(-4px);
|
||||
box-shadow: 0 20px 25px -5px rgba(0, 0, 0, 0.1), 0 10px 10px -5px rgba(0, 0, 0, 0.04);
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body class="bg-gray-50 dark:bg-gray-900 transition-colors duration-300">
|
||||
<!-- Header -->
|
||||
<header class="gradient-bg text-white shadow-lg">
|
||||
<div class="container mx-auto px-4 py-6">
|
||||
<div class="flex items-center justify-between">
|
||||
<div class="flex items-center space-x-3">
|
||||
<i data-lucide="cpu" class="w-8 h-8"></i>
|
||||
<h1 class="text-2xl font-bold">AITBC Marketplace</h1>
|
||||
</div>
|
||||
<nav class="flex items-center space-x-6">
|
||||
<button onclick="showSection('marketplace')" class="hover:text-purple-200 transition">Marketplace</button>
|
||||
<button onclick="showSection('register')" class="hover:text-purple-200 transition">Register GPU</button>
|
||||
<button onclick="showSection('my-bids')" class="hover:text-purple-200 transition">My Listings</button>
|
||||
<button onclick="toggleDarkMode()" class="hover:text-purple-200 transition" title="Toggle dark mode">
|
||||
<i data-lucide="moon" class="w-5 h-5" id="darkModeIcon"></i>
|
||||
</button>
|
||||
<button onclick="connectWallet()" class="bg-white text-purple-600 px-4 py-2 rounded-lg hover:bg-purple-100 transition">
|
||||
<i data-lucide="wallet" class="w-4 h-4 inline mr-2"></i>Connect Wallet
|
||||
</button>
|
||||
</nav>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
<!-- Main Content -->
|
||||
<main class="container mx-auto px-4 py-8">
|
||||
<!-- Stats Section -->
|
||||
<section class="grid grid-cols-1 md:grid-cols-4 gap-6 mb-8">
|
||||
<div class="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-gray-500 dark:text-gray-400 text-sm">Active Bids</p>
|
||||
<p class="text-2xl font-bold text-gray-900 dark:text-white" id="activeBids">0</p>
|
||||
</div>
|
||||
<i data-lucide="trending-up" class="w-8 h-8 text-purple-500"></i>
|
||||
</div>
|
||||
</div>
|
||||
<div class="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-gray-500 text-sm">Total Capacity</p>
|
||||
<p class="text-2xl font-bold" id="totalCapacity">0 GPUs</p>
|
||||
</div>
|
||||
<i data-lucide="server" class="w-8 h-8 text-blue-500"></i>
|
||||
</div>
|
||||
</div>
|
||||
<div class="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-gray-500 text-sm">Avg Price</p>
|
||||
<p class="text-2xl font-bold" id="avgPrice">$0.00</p>
|
||||
</div>
|
||||
<i data-lucide="dollar-sign" class="w-8 h-8 text-green-500"></i>
|
||||
</div>
|
||||
</div>
|
||||
<div class="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-gray-500 text-sm">Your Balance</p>
|
||||
<p class="text-2xl font-bold" id="walletBalance">0 AITBC</p>
|
||||
</div>
|
||||
<i data-lucide="coins" class="w-8 h-8 text-yellow-500"></i>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<!-- Marketplace Section -->
|
||||
<section id="marketplaceSection" class="section">
|
||||
<div class="flex justify-between items-center mb-6">
|
||||
<h2 class="text-2xl font-bold">Available GPU Compute</h2>
|
||||
<div class="flex space-x-4">
|
||||
<select class="border rounded-lg px-4 py-2" id="sortSelect">
|
||||
<option value="price">Sort by Price</option>
|
||||
<option value="capacity">Sort by Capacity</option>
|
||||
<option value="memory">Sort by Memory</option>
|
||||
</select>
|
||||
<button onclick="refreshMarketplace()" class="bg-purple-600 text-white px-4 py-2 rounded-lg hover:bg-purple-700 transition">
|
||||
<i data-lucide="refresh-cw" class="w-4 h-4 inline mr-2"></i>Refresh
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="marketplaceList" class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
|
||||
<!-- GPU cards will be inserted here -->
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<!-- Register GPU Section -->
|
||||
<section id="registerSection" class="section hidden">
|
||||
<div class="max-w-2xl mx-auto">
|
||||
<h2 class="text-2xl font-bold mb-6">Register Your GPU</h2>
|
||||
<div class="bg-white rounded-lg shadow-lg p-8">
|
||||
<form id="gpuRegisterForm" class="space-y-6">
|
||||
<div>
|
||||
<label class="block text-sm font-medium text-gray-700 mb-2">GPU Model</label>
|
||||
<input type="text" id="gpuModel" class="w-full border rounded-lg px-4 py-2" placeholder="e.g., NVIDIA RTX 4060 Ti" required>
|
||||
</div>
|
||||
|
||||
<div class="grid grid-cols-2 gap-4">
|
||||
<div>
|
||||
<label class="block text-sm font-medium text-gray-700 mb-2">Memory (GB)</label>
|
||||
<input type="number" id="gpuMemory" class="w-full border rounded-lg px-4 py-2" placeholder="16" required>
|
||||
</div>
|
||||
<div>
|
||||
<label class="block text-sm font-medium text-gray-700 mb-2">Price per Hour ($)</label>
|
||||
<input type="number" id="gpuPrice" step="0.01" class="w-full border rounded-lg px-4 py-2" placeholder="0.50" required>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label class="block text-sm font-medium text-gray-700 mb-2">CUDA Version</label>
|
||||
<select id="cudaVersion" class="w-full border rounded-lg px-4 py-2">
|
||||
<option value="11.8">CUDA 11.8</option>
|
||||
<option value="12.0">CUDA 12.0</option>
|
||||
<option value="12.1">CUDA 12.1</option>
|
||||
<option value="12.2">CUDA 12.2</option>
|
||||
<option value="12.3">CUDA 12.3</option>
|
||||
<option value="12.4" selected>CUDA 12.4</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label class="block text-sm font-medium text-gray-700 mb-2">Supported Models</label>
|
||||
<div class="space-y-2">
|
||||
<label class="flex items-center">
|
||||
<input type="checkbox" value="stable-diffusion" class="mr-2" checked>
|
||||
<span>Stable Diffusion</span>
|
||||
</label>
|
||||
<label class="flex items-center">
|
||||
<input type="checkbox" value="llama2-7b" class="mr-2" checked>
|
||||
<span>LLaMA-2 7B</span>
|
||||
</label>
|
||||
<label class="flex items-center">
|
||||
<input type="checkbox" value="llama2-13b" class="mr-2">
|
||||
<span>LLaMA-2 13B</span>
|
||||
</label>
|
||||
<label class="flex items-center">
|
||||
<input type="checkbox" value="whisper" class="mr-2" checked>
|
||||
<span>Whisper</span>
|
||||
</label>
|
||||
<label class="flex items-center">
|
||||
<input type="checkbox" value="clip" class="mr-2" checked>
|
||||
<span>CLIP</span>
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label class="block text-sm font-medium text-gray-700 mb-2">Additional Notes</label>
|
||||
<textarea id="gpuNotes" rows="3" class="w-full border rounded-lg px-4 py-2" placeholder="Any additional information about your GPU setup..."></textarea>
|
||||
</div>
|
||||
|
||||
<button type="submit" class="w-full bg-purple-600 text-white py-3 rounded-lg hover:bg-purple-700 transition font-semibold">
|
||||
Register GPU
|
||||
</button>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<!-- My Bids Section -->
|
||||
<section id="myBidsSection" class="section hidden">
|
||||
<h2 class="text-2xl font-bold mb-6">My GPU Listings</h2>
|
||||
<div id="myBidsList" class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
|
||||
<!-- Your listings will appear here -->
|
||||
</div>
|
||||
</section>
|
||||
</main>
|
||||
|
||||
<!-- Toast Notification -->
|
||||
<div id="toast" class="fixed bottom-4 right-4 bg-green-500 text-white px-6 py-3 rounded-lg shadow-lg transform translate-y-full transition-transform duration-300">
|
||||
<span id="toastMessage"></span>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
// API Configuration
|
||||
const API_BASE = window.location.origin + '/api';  // base path for marketplace/miner REST calls
const BLOCKCHAIN_API = window.location.origin + '/rpc';  // base path for blockchain RPC calls
let walletAddress = null;   // address string once a demo wallet is connected, else null
let connectedWallet = null; // { id, address, publicKey } of the demo wallet, else null
|
||||
|
||||
// Initialize
|
||||
// Page bootstrap: render icons, load initial marketplace data, wire up the
// registration form, and apply the saved (or OS-preferred) dark mode.
document.addEventListener('DOMContentLoaded', () => {
    lucide.createIcons();
    loadMarketplaceStats();
    loadMarketplaceBids();

    // Form submission
    document.getElementById('gpuRegisterForm').addEventListener('submit', registerGPU);

    // Check for saved dark mode preference; an explicit localStorage choice
    // wins over the OS-level prefers-color-scheme setting.
    if (localStorage.getItem('darkMode') === 'true' ||
        (!localStorage.getItem('darkMode') && window.matchMedia('(prefers-color-scheme: dark)').matches)) {
        document.documentElement.classList.add('dark');
        updateDarkModeIcon(true);
    }
});
|
||||
|
||||
// Dark mode toggle
|
||||
// Flip the root element's dark-mode class, persist the choice in
// localStorage, and swap the header icon to match the new state.
function toggleDarkMode() {
    const rootClasses = document.documentElement.classList;
    const darkEnabled = rootClasses.toggle('dark');
    localStorage.setItem('darkMode', darkEnabled);
    updateDarkModeIcon(darkEnabled);
}
|
||||
|
||||
// Swap the header toggle's icon: sun while dark mode is on, moon otherwise.
// lucide.createIcons() re-renders the <i> element after the attribute change.
function updateDarkModeIcon(isDark) {
    const icon = document.getElementById('darkModeIcon');
    icon.setAttribute('data-lucide', isDark ? 'sun' : 'moon');
    lucide.createIcons();
}
|
||||
|
||||
// Section Navigation
|
||||
// Show one of the page sections and hide the rest.
//
// BUG FIX: the nav passes kebab-case ids ('my-bids') but the markup uses
// camelCase element ids ('myBidsSection'). The old code did
// getElementById('my-bids' + 'Section'), got null, and threw on
// .classList — so the "My Listings" tab never opened. Normalize the id
// before the lookup, and guard against a still-missing element.
function showSection(section) {
    // 'my-bids' -> 'myBids'; plain ids ('marketplace', 'register') pass through.
    const idPrefix = section.replace(/-([a-z])/g, (_, ch) => ch.toUpperCase());

    document.querySelectorAll('.section').forEach(s => s.classList.add('hidden'));
    const target = document.getElementById(idPrefix + 'Section');
    if (target) {
        target.classList.remove('hidden');
    }

    if (section === 'my-bids') {
        loadMyBids();
    }
}
|
||||
|
||||
// Connect Wallet
|
||||
// Demo wallet connection: fabricate a wallet id, an 'aitbc1...' address
// padded to a fixed width with 'x', and a random 64-digit hex public key,
// then announce the connection and refresh the balance display.
async function connectWallet() {
    const randomSuffix = Math.random().toString(36).slice(2, 11);
    const walletId = 'wallet-' + randomSuffix;
    const padding = 'x'.repeat(40 - walletId.length);
    const address = 'aitbc1' + walletId + padding;

    const hexDigits = [];
    for (let i = 0; i < 64; i++) {
        hexDigits.push(Math.floor(Math.random() * 16).toString(16));
    }

    connectedWallet = {
        id: walletId,
        address: address,
        publicKey: '0x' + hexDigits.join('')
    };

    walletAddress = address;
    showToast('Wallet connected: ' + address.substring(0, 20) + '...');
    updateWalletBalance();
}
|
||||
|
||||
// Load Marketplace Stats
|
||||
// Fetch aggregate marketplace numbers and render them into the stat cards.
// Failures are logged only; the cards keep their previous values.
async function loadMarketplaceStats() {
    try {
        const { data: stats } = await axios.get(`${API_BASE}/marketplace/stats`);
        document.getElementById('activeBids').textContent = stats.activeBids;
        document.getElementById('totalCapacity').textContent = stats.openCapacity + ' GPUs';
        document.getElementById('avgPrice').textContent = '$' + stats.averagePrice.toFixed(2);
    } catch (error) {
        console.error('Failed to load stats:', error);
    }
}
|
||||
|
||||
// Load Marketplace Bids
|
||||
// Pull the current GPU offers from the API and render them; fall back to
// the canned demo offer when the request fails (e.g. backend not running).
async function loadMarketplaceBids() {
    try {
        const response = await axios.get(`${API_BASE}/marketplace/offers`);
        displayMarketplaceBids(response.data);
    } catch (error) {
        console.error('Failed to load bids:', error);
        displayDemoBids();
    }
}
|
||||
|
||||
// Display Marketplace Bids
|
||||
// Render GPU offer cards into the marketplace grid.
//
// SECURITY FIX: offer fields come from the API (ultimately from third-party
// providers) and were interpolated raw into innerHTML — any markup in
// provider/model/notes/id would be injected into the page. All untrusted
// strings are now HTML-escaped before interpolation.
function displayMarketplaceBids(bids) {
    const container = document.getElementById('marketplaceList');

    if (bids.length === 0) {
        container.innerHTML = '<div class="col-span-full text-center py-12 text-gray-500">No GPU offers available at the moment.</div>';
        return;
    }

    // Minimal HTML escaper for untrusted strings (also safe inside the
    // single-quoted onclick attribute below).
    const esc = value => String(value)
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/"/g, '&quot;')
        .replace(/'/g, '&#39;');

    container.innerHTML = bids.map(bid => `
        <div class="bg-white rounded-lg shadow-lg p-6 card-hover">
            <div class="flex justify-between items-start mb-4">
                <h3 class="text-lg font-semibold">${esc(bid.provider)}</h3>
                <span class="bg-green-100 text-green-800 px-2 py-1 rounded text-sm">Available</span>
            </div>
            <div class="space-y-2 text-sm text-gray-600 mb-4">
                <p><i data-lucide="monitor" class="w-4 h-4 inline mr-1"></i>GPU: ${esc(bid.gpu_model || 'Not specified')}</p>
                <p><i data-lucide="hard-drive" class="w-4 h-4 inline mr-1"></i>Memory: ${esc(bid.gpu_memory_gb || 'N/A')} GB</p>
                <p><i data-lucide="clock" class="w-4 h-4 inline mr-1"></i>Capacity: ${esc(bid.capacity || 1)} GPU(s)</p>
            </div>
            <div class="flex justify-between items-center">
                <span class="text-2xl font-bold text-purple-600">$${esc(bid.price || '0.50')}/hr</span>
                <button onclick="purchaseGPU('${esc(bid.id)}')" class="bg-purple-600 text-white px-4 py-2 rounded hover:bg-purple-700 transition">
                    Purchase
                </button>
            </div>
            ${bid.notes ? `<p class="mt-4 text-sm text-gray-500">${esc(bid.notes)}</p>` : ''}
        </div>
    `).join('');

    // Re-render the data-lucide icons added above.
    lucide.createIcons();
}
|
||||
|
||||
// Display Demo Bids (for testing)
|
||||
// Seed the marketplace grid with a single canned offer so the UI remains
// demonstrable when the offers API is unreachable.
function displayDemoBids() {
    const fallbackBid = {
        id: 'demo1',
        provider: 'REDACTED_MINER_KEY',
        gpu_model: 'NVIDIA RTX 4060 Ti',
        gpu_memory_gb: 16,
        capacity: 1,
        price: 0.50,
        notes: 'NVIDIA RTX 4060 Ti 16GB - Available for AI workloads'
    };
    displayMarketplaceBids([fallbackBid]);
}
|
||||
|
||||
// Register GPU
|
||||
// Handle the GPU registration form: register the machine as a miner, then
// publish a marketplace bid for it, and refresh the UI on success.
//
// FIXES: the supported-models query used to scan ALL checkboxes in the
// document (it worked only because the form holds the page's only
// checkboxes) — it is now scoped to the form. parseInt gains an explicit
// radix, and the unused response locals are dropped.
async function registerGPU(e) {
    e.preventDefault();

    const gpuModel = document.getElementById('gpuModel').value;
    const gpuMemory = document.getElementById('gpuMemory').value;
    const gpuPrice = document.getElementById('gpuPrice').value;
    const cudaVersion = document.getElementById('cudaVersion').value;
    const gpuNotes = document.getElementById('gpuNotes').value;

    // Only checkboxes inside the registration form count as model selections.
    const supportedModels = [];
    document.querySelectorAll('#gpuRegisterForm input[type="checkbox"]:checked').forEach(cb => {
        supportedModels.push(cb.value);
    });

    try {
        // First register as miner
        await axios.post(`${API_BASE}/miners/register`, {
            capabilities: {
                gpu: gpuModel,
                gpu_memory_gb: parseInt(gpuMemory, 10),
                cuda_version: cudaVersion,
                supported_models: supportedModels,
                region: 'local',
                pricing_per_hour: parseFloat(gpuPrice)
            }
        }, {
            headers: { 'X-Api-Key': 'REDACTED_MINER_KEY' }
        });

        // Then create marketplace bid
        await axios.post(`${API_BASE}/marketplace/bids`, {
            provider: 'REDACTED_MINER_KEY',
            capacity: 1,
            price: parseFloat(gpuPrice),
            notes: `${gpuModel} ${gpuMemory}GB - ${supportedModels.join(', ')}${gpuNotes ? '. ' + gpuNotes : ''}`
        }, {
            headers: { 'X-Api-Key': 'REDACTED_CLIENT_KEY' }
        });

        showToast('GPU registered successfully!');
        document.getElementById('gpuRegisterForm').reset();
        loadMarketplaceStats();
        loadMarketplaceBids();

    } catch (error) {
        console.error('Registration failed:', error);
        showToast('Registration failed. Please try again.', 'error');
    }
}
|
||||
|
||||
// Purchase GPU
|
||||
// Buy compute time against an offer. Requires a connected wallet; the demo
// backend creates a generic inference job. NOTE(review): bidId is accepted
// but not included in the request payload — the job is not tied to the
// specific offer clicked.
async function purchaseGPU(bidId) {
    if (!walletAddress) {
        showToast('Please connect your wallet first', 'error');
        return;
    }

    const jobRequest = {
        job_type: 'inference',
        model: 'stable-diffusion',
        requirements: {
            gpu_memory_min_gb: 8,
            cuda_version_min: '11.0'
        },
        pricing: {
            max_price_per_hour: 1.0,
            duration_hours: 1
        }
    };

    try {
        await axios.post(`${API_BASE}/jobs`, jobRequest, {
            headers: { 'X-Api-Key': 'REDACTED_CLIENT_KEY' }
        });
        showToast('GPU time purchased successfully!');
        updateWalletBalance();
    } catch (error) {
        console.error('Purchase failed:', error);
        showToast('Purchase failed. Please try again.', 'error');
    }
}
|
||||
|
||||
// Load My Bids
|
||||
// Render the "My Listings" panel. The content is a hard-coded demo card
// (no API call) — it always shows one RTX 4060 Ti listing regardless of
// what was actually registered. The Edit/Remove buttons have no handlers.
function loadMyBids() {
    const myBidsList = document.getElementById('myBidsList');

    // For demo, show the registered GPU
    myBidsList.innerHTML = `
        <div class="bg-white rounded-lg shadow-lg p-6">
            <div class="flex justify-between items-start mb-4">
                <h3 class="text-lg font-semibold">NVIDIA RTX 4060 Ti</h3>
                <span class="bg-green-100 text-green-800 px-2 py-1 rounded text-sm">Active</span>
            </div>
            <div class="space-y-2 text-sm text-gray-600 mb-4">
                <p><i data-lucide="monitor" class="w-4 h-4 inline mr-1"></i>Memory: 16 GB</p>
                <p><i data-lucide="clock" class="w-4 h-4 inline mr-1"></i>Price: $0.50/hr</p>
                <p><i data-lucide="activity" class="w-4 h-4 inline mr-1"></i>Status: Available</p>
            </div>
            <div class="flex space-x-2">
                <button class="flex-1 bg-blue-600 text-white px-3 py-2 rounded hover:bg-blue-700 transition text-sm">
                    Edit
                </button>
                <button class="flex-1 bg-red-600 text-white px-3 py-2 rounded hover:bg-red-700 transition text-sm">
                    Remove
                </button>
            </div>
        </div>
    `;
    // Re-render the data-lucide icons injected above.
    lucide.createIcons();
}
|
||||
|
||||
// Update Wallet Balance
|
||||
// Refresh the header balance for the connected wallet. When the blockchain
// RPC is unreachable, a fixed demo figure is shown instead.
async function updateWalletBalance() {
    if (!walletAddress) {
        return;
    }

    const balanceEl = document.getElementById('walletBalance');
    try {
        const response = await axios.get(`${BLOCKCHAIN_API}/getBalance/${walletAddress}`);
        balanceEl.textContent = response.data.balance + ' AITBC';
    } catch (error) {
        balanceEl.textContent = '1000 AITBC'; // Demo balance
    }
}
|
||||
|
||||
// Refresh Marketplace
|
||||
// Re-query both the stat cards and the offer grid, then confirm to the user.
function refreshMarketplace() {
    loadMarketplaceStats();
    loadMarketplaceBids();
    showToast('Marketplace refreshed');
}
|
||||
|
||||
// Toast Notification
|
||||
// Slide-in notification in the bottom-right corner. `type` selects the
// background ('error' → red, anything else → green); the toast hides
// itself again after 3 seconds.
function showToast(message, type = 'success') {
    const toast = document.getElementById('toast');
    document.getElementById('toastMessage').textContent = message;

    const color = type === 'error' ? 'bg-red-500' : 'bg-green-500';
    toast.className = `fixed bottom-4 right-4 px-6 py-3 rounded-lg shadow-lg transform transition-transform duration-300 ${color} text-white`;

    toast.style.transform = 'translateY(0)';
    setTimeout(() => {
        toast.style.transform = 'translateY(100%)';
    }, 3000);
}
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,53 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Simple HTTP server for the AITBC Marketplace UI
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from http.server import HTTPServer, SimpleHTTPRequestHandler
|
||||
import argparse
|
||||
|
||||
class CORSHTTPRequestHandler(SimpleHTTPRequestHandler):
    """Static-file handler that adds permissive CORS headers to every response."""

    # (header name, value) pairs appended to every response.
    _CORS_HEADERS = (
        ('Access-Control-Allow-Origin', '*'),
        ('Access-Control-Allow-Methods', 'GET, POST, OPTIONS'),
        ('Access-Control-Allow-Headers', 'Content-Type, X-Api-Key'),
    )

    def end_headers(self):
        """Inject the CORS headers just before the header block is flushed."""
        for name, value in self._CORS_HEADERS:
            self.send_header(name, value)
        super().end_headers()

    def do_OPTIONS(self):
        """Answer CORS preflight requests with a bare 200 + CORS headers."""
        self.send_response(200)
        self.end_headers()
|
||||
|
||||
def run_server(port=3000, directory=None):
    """Serve the marketplace UI over HTTP on *port*.

    If *directory* is given, the process chdirs there first so files are
    served relative to it. Blocks until interrupted with Ctrl+C, then
    closes the listening socket.
    """
    if directory:
        os.chdir(directory)

    server = HTTPServer(('', port), CORSHTTPRequestHandler)

    banner = f"""
╔═══════════════════════════════════════╗
║     AITBC Marketplace UI Server       ║
╠═══════════════════════════════════════╣
║  Server running at:                   ║
║  http://localhost:{port}                ║
║                                       ║
║  Press Ctrl+C to stop                 ║
╚═══════════════════════════════════════╝
"""
    print(banner)

    try:
        server.serve_forever()
    except KeyboardInterrupt:
        print("\nShutting down server...")
        server.server_close()
|
||||
|
||||
if __name__ == '__main__':
    # CLI entry point: `python server.py --port 3000 --dir .`
    parser = argparse.ArgumentParser(description='Run the AITBC Marketplace UI server')
    parser.add_argument('--port', type=int, default=3000, help='Port to run the server on')
    parser.add_argument('--dir', type=str, default='.', help='Directory to serve from')

    args = parser.parse_args()
    # Blocks until Ctrl+C (handled inside run_server).
    run_server(port=args.port, directory=args.dir)
|
||||
@@ -1,164 +0,0 @@
|
||||
# AITBC Miner Dashboard
|
||||
|
||||
A real-time monitoring dashboard for GPU mining operations in the AITBC network.
|
||||
|
||||
## Features
|
||||
|
||||
### 🎯 GPU Monitoring
|
||||
- Real-time GPU utilization
|
||||
- Temperature monitoring
|
||||
- Power consumption tracking
|
||||
- Memory usage display
|
||||
- Performance state indicators
|
||||
|
||||
### ⛏️ Mining Operations
|
||||
- Active job tracking
|
||||
- Job progress visualization
|
||||
- Success/failure statistics
|
||||
- Average job time metrics
|
||||
|
||||
### 📊 Performance Analytics
|
||||
- GPU utilization charts (last hour)
|
||||
- Hash rate performance tracking
|
||||
- Mining statistics dashboard
|
||||
- Service capability overview
|
||||
|
||||
### 🔧 Available Services
|
||||
- GPU Computing (CUDA cores)
|
||||
- Parallel Processing (multi-threaded)
|
||||
- Hash Generation (proof-of-work)
|
||||
- AI Model Training (ML operations)
|
||||
- Blockchain Validation
|
||||
- Data Processing
|
||||
|
||||
## Quick Start
|
||||
|
||||
### 1. Deploy the Dashboard
|
||||
```bash
|
||||
cd /home/oib/windsurf/aitbc/apps/miner-dashboard
|
||||
sudo ./deploy.sh
|
||||
```
|
||||
|
||||
### 2. Access the Dashboard
|
||||
- Local: http://localhost:8080
|
||||
- Remote: http://[SERVER_IP]:8080
|
||||
|
||||
### 3. Monitor Mining
|
||||
- View real-time GPU status
|
||||
- Track active mining jobs
|
||||
- Monitor hash rates
|
||||
- Check service availability
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
┌─────────────────┐ ┌──────────────────┐ ┌─────────────────┐
|
||||
│ Web Browser │◄──►│ Dashboard Server │◄──►│ GPU Miner │
|
||||
│ (Dashboard UI) │ │ (Port 8080) │ │ (Background) │
|
||||
└─────────────────┘ └──────────────────┘ └─────────────────┘
|
||||
│
|
||||
▼
|
||||
┌─────────────────┐
|
||||
│ nvidia-smi │
|
||||
│ (GPU Metrics) │
|
||||
└─────────────────┘
|
||||
```
|
||||
|
||||
## API Endpoints
|
||||
|
||||
- `GET /api/gpu-status` - Real-time GPU metrics
|
||||
- `GET /api/mining-jobs` - Active mining jobs
|
||||
- `GET /api/statistics` - Mining statistics
|
||||
- `GET /api/services` - Available services
|
||||
|
||||
## Service Management
|
||||
|
||||
### Start Services
|
||||
```bash
|
||||
sudo systemctl start aitbc-miner
|
||||
sudo systemctl start aitbc-miner-dashboard
|
||||
```
|
||||
|
||||
### Stop Services
|
||||
```bash
|
||||
sudo systemctl stop aitbc-miner
|
||||
sudo systemctl stop aitbc-miner-dashboard
|
||||
```
|
||||
|
||||
### View Logs
|
||||
```bash
|
||||
sudo journalctl -u aitbc-miner -f
|
||||
sudo journalctl -u aitbc-miner-dashboard -f
|
||||
```
|
||||
|
||||
## GPU Requirements
|
||||
|
||||
- NVIDIA GPU with CUDA support
|
||||
- nvidia-smi utility installed
|
||||
- GPU memory: 4GB+ recommended
|
||||
- CUDA drivers up to date
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Dashboard Not Loading
|
||||
```bash
|
||||
# Check service status
|
||||
sudo systemctl status aitbc-miner-dashboard
|
||||
|
||||
# Check logs
|
||||
sudo journalctl -u aitbc-miner-dashboard -n 50
|
||||
```
|
||||
|
||||
### GPU Not Detected
|
||||
```bash
|
||||
# Verify nvidia-smi
|
||||
nvidia-smi
|
||||
|
||||
# Check GPU permissions
|
||||
ls -l /dev/nvidia*
|
||||
```
|
||||
|
||||
### No Mining Jobs
|
||||
```bash
|
||||
# Check miner service
|
||||
sudo systemctl status aitbc-miner
|
||||
|
||||
# Restart if needed
|
||||
sudo systemctl restart aitbc-miner
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### GPU Monitoring
|
||||
The dashboard automatically detects NVIDIA GPUs using nvidia-smi.
|
||||
|
||||
### Mining Performance
|
||||
Adjust mining parameters in `miner_service.py`:
|
||||
- Job frequency
|
||||
- Processing duration
|
||||
- Success rates
|
||||
|
||||
### Dashboard Port
|
||||
Change port in `dashboard_server.py` (default: 8080).
|
||||
|
||||
## Security
|
||||
|
||||
- Dashboard runs on localhost by default
|
||||
- No external database required
|
||||
- Minimal dependencies
|
||||
- Read-only GPU monitoring
|
||||
|
||||
## Development
|
||||
|
||||
### Extend Services
|
||||
Add new mining services in the `get_services()` method.
|
||||
|
||||
### Customize UI
|
||||
Modify `index.html` to change the dashboard appearance.
|
||||
|
||||
### Add Metrics
|
||||
Extend the API with new endpoints for additional metrics.
|
||||
|
||||
## License
|
||||
|
||||
AITBC Project - Internal Use Only
|
||||
@@ -1,15 +0,0 @@
|
||||
# systemd unit for the AITBC miner dashboard web server (dashboard_server.py).
# Installed into /opt/aitbc-miner-dashboard by deploy.sh.
[Unit]
Description=AITBC Miner Dashboard
After=network.target

[Service]
Type=simple
# NOTE(review): runs as root; the server only reads GPU metrics, so a
# dedicated unprivileged user would be safer — confirm before changing.
User=root
WorkingDirectory=/opt/aitbc-miner-dashboard
Environment=PYTHONPATH=/opt/aitbc-miner-dashboard
ExecStart=/opt/aitbc-miner-dashboard/.venv/bin/python dashboard_server.py
# Restart unconditionally, 3 seconds after any exit.
Restart=always
RestartSec=3

[Install]
WantedBy=multi-user.target
|
||||
@@ -1,15 +0,0 @@
|
||||
# systemd unit for the background GPU mining service (miner_service.py).
# Shares its install directory and virtualenv with the dashboard unit.
[Unit]
Description=AITBC GPU Mining Service
After=network.target

[Service]
Type=simple
# NOTE(review): runs as root — verify whether GPU access really requires it.
User=root
WorkingDirectory=/opt/aitbc-miner-dashboard
Environment=PYTHONPATH=/opt/aitbc-miner-dashboard
ExecStart=/opt/aitbc-miner-dashboard/.venv/bin/python miner_service.py
# Restart unconditionally, 3 seconds after any exit.
Restart=always
RestartSec=3

[Install]
WantedBy=multi-user.target
|
||||
@@ -1,185 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""AITBC Miner Dashboard API - Real-time GPU and mining status"""
|
||||
|
||||
from http.server import HTTPServer, BaseHTTPRequestHandler
|
||||
import json
|
||||
import subprocess
|
||||
import psutil
|
||||
from datetime import datetime, timedelta
|
||||
import random
|
||||
|
||||
class MinerDashboardHandler(BaseHTTPRequestHandler):
    """HTTP API handler for the miner dashboard.

    Endpoints:
        GET /api/gpu-status   -- live nvidia-smi metrics (mock fallback)
        GET /api/mining-jobs  -- active mining jobs (mock data for now)
        GET /api/statistics   -- aggregate mining stats (randomized demo values)
        GET /api/services     -- advertised compute services
        GET / , /index.html   -- the dashboard HTML page

    Fixes over the previous version: nvidia-smi is invoked with a timeout
    so a wedged driver cannot hang the request; only the first CSV line is
    parsed (nvidia-smi emits one line per GPU, and the old split broke on
    multi-GPU hosts); a missing nvidia-smi binary now yields the same mock
    fallback as a non-zero exit instead of a 500; the dashboard file is
    read with an explicit encoding.
    """

    def send_json_response(self, data, status=200):
        """Serialize *data* as JSON and send it with a CORS header."""
        self.send_response(status)
        self.send_header('Content-Type', 'application/json')
        self.send_header('Access-Control-Allow-Origin', '*')
        self.end_headers()
        # default=str lets datetime objects survive serialization.
        self.wfile.write(json.dumps(data, default=str).encode())

    def do_GET(self):
        """Route GET requests to the matching endpoint handler."""
        if self.path == '/api/gpu-status':
            self.get_gpu_status()
        elif self.path == '/api/mining-jobs':
            self.get_mining_jobs()
        elif self.path == '/api/statistics':
            self.get_statistics()
        elif self.path == '/api/services':
            self.get_services()
        elif self.path == '/' or self.path == '/index.html':
            self.serve_dashboard()
        else:
            self.send_error(404)

    @staticmethod
    def _mock_gpu_status():
        """Static placeholder metrics used when nvidia-smi is unavailable."""
        return {
            'utilization': 0,
            'temperature': 43,
            'power_usage': 18,
            'memory_used': 2.9,
            'memory_total': 16,
            'performance_state': 'P8',
            'timestamp': datetime.now().isoformat()
        }

    def get_gpu_status(self):
        """Report metrics for the first GPU as parsed from nvidia-smi."""
        try:
            result = subprocess.run(
                ['nvidia-smi',
                 '--query-gpu=utilization.gpu,temperature.gpu,power.draw,'
                 'memory.used,memory.total,performance_state',
                 '--format=csv,noheader,nounits'],
                capture_output=True, text=True,
                timeout=10)  # don't let a stalled driver hang the request
        except (FileNotFoundError, subprocess.TimeoutExpired):
            # No nvidia-smi (or it hung): same mock fallback as a failed run.
            self.send_json_response(self._mock_gpu_status())
            return

        try:
            if result.returncode == 0:
                # nvidia-smi emits one CSV line per GPU; report the first.
                first_line = result.stdout.strip().splitlines()[0]
                values = first_line.split(', ')
                gpu_data = {
                    'utilization': int(values[0]),
                    'temperature': int(values[1]),
                    'power_usage': float(values[2]),
                    'memory_used': float(values[3]) / 1024,  # MB -> GB
                    'memory_total': float(values[4]) / 1024,
                    'performance_state': values[5],
                    'timestamp': datetime.now().isoformat()
                }
                self.send_json_response(gpu_data)
            else:
                # nvidia-smi ran but failed -> mock data keeps the UI alive.
                self.send_json_response(self._mock_gpu_status())
        except Exception as e:
            self.send_json_response({'error': str(e)}, 500)

    def get_mining_jobs(self):
        """Report active mining jobs.

        NOTE(review): still mock data — a real integration would query the
        miner service (see the original TODO about a socket/API connection).
        """
        try:
            jobs = [
                {
                    'id': 'job_12345',
                    'name': 'Matrix Computation',
                    'progress': 85,
                    'status': 'running',
                    'started_at': (datetime.now() - timedelta(minutes=10)).isoformat(),
                    'estimated_completion': (datetime.now() + timedelta(minutes=2)).isoformat()
                },
                {
                    'id': 'job_12346',
                    'name': 'Hash Validation',
                    'progress': 42,
                    'status': 'running',
                    'started_at': (datetime.now() - timedelta(minutes=5)).isoformat(),
                    'estimated_completion': (datetime.now() + timedelta(minutes=7)).isoformat()
                }
            ]
            self.send_json_response(jobs)
        except Exception as e:
            self.send_json_response({'error': str(e)}, 500)

    def get_statistics(self):
        """Report aggregate mining statistics (randomized demo values)."""
        stats = {
            'total_jobs_completed': random.randint(1200, 1300),
            'average_job_time': round(random.uniform(10, 15), 1),
            'success_rate': round(random.uniform(95, 99), 1),
            'total_earned_btc': round(random.uniform(0.004, 0.005), 4),
            'total_earned_aitbc': random.randint(100, 200),
            'uptime_hours': 24,
            'hash_rate': round(random.uniform(45, 55), 1),  # MH/s
            'efficiency': round(random.uniform(0.8, 1.2), 2)  # W/MH
        }
        self.send_json_response(stats)

    def get_services(self):
        """Report the fixed catalogue of compute services this miner offers."""
        services = [
            {
                'name': 'GPU Computing',
                'description': 'CUDA cores available for computation',
                'status': 'active',
                'capacity': '100%',
                'utilization': 65
            },
            {
                'name': 'Parallel Processing',
                'description': 'Multi-threaded job execution',
                'status': 'active',
                'capacity': '8 threads',
                'utilization': 45
            },
            {
                'name': 'Hash Generation',
                'description': 'Proof-of-work computation',
                'status': 'standby',
                'capacity': '50 MH/s',
                'utilization': 0
            },
            {
                'name': 'AI Model Training',
                'description': 'Machine learning operations',
                'status': 'available',
                'capacity': '16GB VRAM',
                'utilization': 0
            },
            {
                'name': 'Blockchain Validation',
                'description': 'AITBC block validation',
                'status': 'active',
                'capacity': '1000 tx/s',
                'utilization': 23
            },
            {
                'name': 'Data Processing',
                'description': 'Large dataset processing',
                'status': 'available',
                'capacity': '500GB/hour',
                'utilization': 0
            }
        ]
        self.send_json_response(services)

    def serve_dashboard(self):
        """Serve index.html from the working directory as the dashboard page."""
        try:
            with open('index.html', 'r', encoding='utf-8') as f:
                self.send_response(200)
                self.send_header('Content-Type', 'text/html')
                self.end_headers()
                self.wfile.write(f.read().encode())
        except FileNotFoundError:
            self.send_error(404, 'Dashboard not found')
|
||||
|
||||
def run_server(port=8080):
|
||||
"""Run the miner dashboard server"""
|
||||
server = HTTPServer(('localhost', port), MinerDashboardHandler)
|
||||
print(f"""
|
||||
╔═══════════════════════════════════════╗
|
||||
║ AITBC Miner Dashboard Server ║
|
||||
╠═══════════════════════════════════════╣
|
||||
║ Dashboard running at: ║
|
||||
║ http://localhost:{port} ║
|
||||
║ ║
|
||||
║ GPU Monitoring Active! ║
|
||||
║ Real-time Mining Status ║
|
||||
╚═══════════════════════════════════════╝
|
||||
""")
|
||||
server.serve_forever()
|
||||
|
||||
if __name__ == "__main__":
|
||||
run_server()
|
||||
@@ -1,71 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
echo "=== AITBC Miner Dashboard & Service Deployment ==="
|
||||
echo ""
|
||||
|
||||
# Check if running as root
|
||||
if [ "$EUID" -ne 0 ]; then
|
||||
echo "Please run as root (use sudo)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Create directories
|
||||
echo "Creating directories..."
|
||||
mkdir -p /opt/aitbc-miner-dashboard
|
||||
mkdir -p /var/log/aitbc-miner
|
||||
|
||||
# Copy files
|
||||
echo "Copying files..."
|
||||
cp -r /home/oib/windsurf/aitbc/apps/miner-dashboard/* /opt/aitbc-miner-dashboard/
|
||||
|
||||
# Set permissions
|
||||
chown -R root:root /opt/aitbc-miner-dashboard
|
||||
chmod +x /opt/aitbc-miner-dashboard/*.py
|
||||
chmod +x /opt/aitbc-miner-dashboard/*.sh
|
||||
|
||||
# Create virtual environment
|
||||
echo "Setting up Python environment..."
|
||||
cd /opt/aitbc-miner-dashboard
|
||||
python3 -m venv .venv
|
||||
.venv/bin/pip install psutil
|
||||
|
||||
# Install systemd services
|
||||
echo "Installing systemd services..."
|
||||
cp aitbc-miner-dashboard.service /etc/systemd/system/
|
||||
cp aitbc-miner.service /etc/systemd/system/
|
||||
|
||||
# Reload systemd
|
||||
systemctl daemon-reload
|
||||
|
||||
# Enable and start services
|
||||
echo "Starting services..."
|
||||
systemctl enable aitbc-miner
|
||||
systemctl enable aitbc-miner-dashboard
|
||||
systemctl start aitbc-miner
|
||||
systemctl start aitbc-miner-dashboard
|
||||
|
||||
# Wait for services to start
|
||||
sleep 5
|
||||
|
||||
# Check status
|
||||
echo ""
|
||||
echo "=== Service Status ==="
|
||||
systemctl status aitbc-miner --no-pager -l | head -5
|
||||
systemctl status aitbc-miner-dashboard --no-pager -l | head -5
|
||||
|
||||
# Get IP address
|
||||
IP=$(hostname -I | awk '{print $1}')
|
||||
|
||||
echo ""
|
||||
echo "✅ Deployment complete!"
|
||||
echo ""
|
||||
echo "Services:"
|
||||
echo " - Miner Service: Running (background)"
|
||||
echo " - Dashboard: http://localhost:8080"
|
||||
echo ""
|
||||
echo "Access from other machines:"
|
||||
echo " http://$IP:8080"
|
||||
echo ""
|
||||
echo "To view logs:"
|
||||
echo " sudo journalctl -u aitbc-miner -f"
|
||||
echo " sudo journalctl -u aitbc-miner-dashboard -f"
|
||||
@@ -1,356 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
echo "========================================"
|
||||
echo " AITBC GPU Miner Dashboard Setup"
|
||||
echo " Running on HOST (at1/localhost)"
|
||||
echo "========================================"
|
||||
echo ""
|
||||
|
||||
# Check if we have GPU access
|
||||
if ! command -v nvidia-smi &> /dev/null; then
|
||||
echo "❌ ERROR: nvidia-smi not found!"
|
||||
echo "Please ensure NVIDIA drivers are installed on the host."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ GPU detected: $(nvidia-smi --query-gpu=name --format=csv,noheader)"
|
||||
echo ""
|
||||
|
||||
# Create dashboard directory
|
||||
mkdir -p ~/miner-dashboard
|
||||
cd ~/miner-dashboard
|
||||
|
||||
echo "Creating dashboard files..."
|
||||
|
||||
# Create the main dashboard HTML
|
||||
cat > index.html << 'HTML'
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>AITBC GPU Miner Dashboard - Host</title>
|
||||
<script src="https://cdn.tailwindcss.com"></script>
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
|
||||
<style>
|
||||
@keyframes pulse-green {
|
||||
0%, 100% { box-shadow: 0 0 0 0 rgba(34, 197, 94, 0.7); }
|
||||
50% { box-shadow: 0 0 0 10px rgba(34, 197, 94, 0); }
|
||||
}
|
||||
.gpu-gradient { background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); }
|
||||
.status-active { animation: pulse-green 2s infinite; }
|
||||
</style>
|
||||
</head>
|
||||
<body class="bg-gray-900 text-white min-h-screen">
|
||||
<!-- Header -->
|
||||
<header class="bg-gray-800 shadow-xl">
|
||||
<div class="container mx-auto px-6 py-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div class="flex items-center space-x-4">
|
||||
<i class="fas fa-microchip text-4xl text-purple-500"></i>
|
||||
<div>
|
||||
<h1 class="text-3xl font-bold">AITBC GPU Miner Dashboard</h1>
|
||||
<p class="text-green-400">✓ Running on HOST with direct GPU access</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex items-center space-x-4">
|
||||
<span class="flex items-center bg-green-900/50 px-3 py-1 rounded-full">
|
||||
<span class="w-3 h-3 bg-green-500 rounded-full status-active mr-2"></span>
|
||||
<span>GPU Online</span>
|
||||
</span>
|
||||
<button onclick="location.reload()" class="bg-purple-600 hover:bg-purple-700 px-4 py-2 rounded-lg transition">
|
||||
<i class="fas fa-sync-alt mr-2"></i>Refresh
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
<!-- Main Content -->
|
||||
<main class="container mx-auto px-6 py-8">
|
||||
<!-- GPU Status Card -->
|
||||
<div class="gpu-gradient rounded-xl p-8 mb-8 text-white shadow-2xl">
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<div>
|
||||
<h2 class="text-3xl font-bold mb-2" id="gpuName">NVIDIA GeForce RTX 4060 Ti</h2>
|
||||
<p class="text-purple-200">Real-time GPU Performance Monitor</p>
|
||||
</div>
|
||||
<div class="text-right">
|
||||
<div class="text-5xl font-bold" id="gpuUtil">0%</div>
|
||||
<div class="text-purple-200">GPU Utilization</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="grid grid-cols-1 md:grid-cols-4 gap-4">
|
||||
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-purple-200 text-sm">Temperature</p>
|
||||
<p class="text-2xl font-bold" id="gpuTemp">--°C</p>
|
||||
</div>
|
||||
<i class="fas fa-thermometer-half text-3xl text-orange-400"></i>
|
||||
</div>
|
||||
</div>
|
||||
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-purple-200 text-sm">Power Usage</p>
|
||||
<p class="text-2xl font-bold" id="gpuPower">--W</p>
|
||||
</div>
|
||||
<i class="fas fa-bolt text-3xl text-yellow-400"></i>
|
||||
</div>
|
||||
</div>
|
||||
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-purple-200 text-sm">Memory Used</p>
|
||||
<p class="text-2xl font-bold" id="gpuMem">--GB</p>
|
||||
</div>
|
||||
<i class="fas fa-memory text-3xl text-blue-400"></i>
|
||||
</div>
|
||||
</div>
|
||||
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-purple-200 text-sm">Performance</p>
|
||||
<p class="text-2xl font-bold" id="gpuPerf">P8</p>
|
||||
</div>
|
||||
<i class="fas fa-tachometer-alt text-3xl text-green-400"></i>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Mining Operations -->
|
||||
<div class="grid grid-cols-1 lg:grid-cols-2 gap-8 mb-8">
|
||||
<!-- Active Jobs -->
|
||||
<div class="bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4 flex items-center">
|
||||
<i class="fas fa-tasks mr-3 text-green-500"></i>
|
||||
Mining Operations
|
||||
<span id="jobCount" class="ml-auto text-sm text-gray-400">0 active jobs</span>
|
||||
</h3>
|
||||
<div id="jobList" class="space-y-3">
|
||||
<div class="text-center py-8">
|
||||
<i class="fas fa-pause-circle text-6xl text-yellow-500 mb-4"></i>
|
||||
<p class="text-xl font-semibold text-yellow-500">Miner Idle</p>
|
||||
<p class="text-gray-400 mt-2">Ready to accept mining jobs</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- GPU Services -->
|
||||
<div class="bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4 flex items-center">
|
||||
<i class="fas fa-server mr-3 text-blue-500"></i>
|
||||
GPU Services Status
|
||||
</h3>
|
||||
<div class="space-y-3">
|
||||
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center hover:bg-gray-600 transition">
|
||||
<div class="flex items-center">
|
||||
<i class="fas fa-cube text-purple-400 mr-3"></i>
|
||||
<div>
|
||||
<p class="font-semibold">CUDA Computing</p>
|
||||
<p class="text-sm text-gray-400">4352 CUDA cores available</p>
|
||||
</div>
|
||||
</div>
|
||||
<span class="bg-green-600 px-3 py-1 rounded-full text-sm">Active</span>
|
||||
</div>
|
||||
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center hover:bg-gray-600 transition">
|
||||
<div class="flex items-center">
|
||||
<i class="fas fa-project-diagram text-blue-400 mr-3"></i>
|
||||
<div>
|
||||
<p class="font-semibold">Parallel Processing</p>
|
||||
<p class="text-sm text-gray-400">Multi-threaded operations</p>
|
||||
</div>
|
||||
</div>
|
||||
<span class="bg-green-600 px-3 py-1 rounded-full text-sm">Active</span>
|
||||
</div>
|
||||
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center hover:bg-gray-600 transition">
|
||||
<div class="flex items-center">
|
||||
<i class="fas fa-hashtag text-green-400 mr-3"></i>
|
||||
<div>
|
||||
<p class="font-semibold">Hash Generation</p>
|
||||
<p class="text-sm text-gray-400">Proof-of-work computation</p>
|
||||
</div>
|
||||
</div>
|
||||
<span class="bg-yellow-600 px-3 py-1 rounded-full text-sm">Standby</span>
|
||||
</div>
|
||||
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center hover:bg-gray-600 transition">
|
||||
<div class="flex items-center">
|
||||
<i class="fas fa-brain text-pink-400 mr-3"></i>
|
||||
<div>
|
||||
<p class="font-semibold">AI Model Training</p>
|
||||
<p class="text-sm text-gray-400">Machine learning operations</p>
|
||||
</div>
|
||||
</div>
|
||||
<span class="bg-gray-600 px-3 py-1 rounded-full text-sm">Available</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Performance Charts -->
|
||||
<div class="grid grid-cols-1 lg:grid-cols-2 gap-8 mb-8">
|
||||
<div class="bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4">GPU Utilization (Last Hour)</h3>
|
||||
<canvas id="utilChart" width="400" height="200"></canvas>
|
||||
</div>
|
||||
<div class="bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4">Hash Rate Performance</h3>
|
||||
<canvas id="hashChart" width="400" height="200"></canvas>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- System Info -->
|
||||
<div class="bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4">System Information</h3>
|
||||
<div class="grid grid-cols-1 md:grid-cols-3 gap-6">
|
||||
<div class="bg-gray-700 rounded-lg p-4 text-center">
|
||||
<i class="fas fa-desktop text-3xl text-blue-400 mb-2"></i>
|
||||
<p class="text-sm text-gray-400">Host System</p>
|
||||
<p class="font-semibold text-green-400" id="hostname">Loading...</p>
|
||||
</div>
|
||||
<div class="bg-gray-700 rounded-lg p-4 text-center">
|
||||
<i class="fas fa-microchip text-3xl text-purple-400 mb-2"></i>
|
||||
<p class="text-sm text-gray-400">GPU Access</p>
|
||||
<p class="font-semibold text-green-400">Direct</p>
|
||||
</div>
|
||||
<div class="bg-gray-700 rounded-lg p-4 text-center">
|
||||
<i class="fas fa-cube text-3xl text-red-400 mb-2"></i>
|
||||
<p class="text-sm text-gray-400">Container</p>
|
||||
<p class="font-semibold text-red-400">Not Used</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</main>
|
||||
|
||||
<script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
|
||||
<script>
|
||||
// Initialize data
|
||||
let utilData = Array(12).fill(0);
|
||||
let hashData = Array(12).fill(0);
|
||||
let utilChart, hashChart;
|
||||
|
||||
// Initialize charts
|
||||
function initCharts() {
|
||||
// Utilization chart
|
||||
const utilCtx = document.getElementById('utilChart').getContext('2d');
|
||||
utilChart = new Chart(utilCtx, {
|
||||
type: 'line',
|
||||
data: {
|
||||
labels: Array.from({length: 12}, (_, i) => `${60-i*5}m`),
|
||||
datasets: [{
|
||||
label: 'GPU Utilization %',
|
||||
data: utilData,
|
||||
borderColor: 'rgb(147, 51, 234)',
|
||||
backgroundColor: 'rgba(147, 51, 234, 0.1)',
|
||||
tension: 0.4
|
||||
}]
|
||||
},
|
||||
options: {
|
||||
responsive: true,
|
||||
plugins: { legend: { display: false } },
|
||||
scales: {
|
||||
y: { beginAtZero: true, max: 100, ticks: { color: '#9CA3AF' }, grid: { color: '#374151' } },
|
||||
x: { ticks: { color: '#9CA3AF' }, grid: { color: '#374151' } }
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Hash rate chart
|
||||
const hashCtx = document.getElementById('hashChart').getContext('2d');
|
||||
hashChart = new Chart(hashCtx, {
|
||||
type: 'line',
|
||||
data: {
|
||||
labels: Array.from({length: 12}, (_, i) => `${60-i*5}m`),
|
||||
datasets: [{
|
||||
label: 'Hash Rate (MH/s)',
|
||||
data: hashData,
|
||||
borderColor: 'rgb(34, 197, 94)',
|
||||
backgroundColor: 'rgba(34, 197, 94, 0.1)',
|
||||
tension: 0.4
|
||||
}]
|
||||
},
|
||||
options: {
|
||||
responsive: true,
|
||||
plugins: { legend: { display: false } },
|
||||
scales: {
|
||||
y: { beginAtZero: true, ticks: { color: '#9CA3AF' }, grid: { color: '#374151' } },
|
||||
x: { ticks: { color: '#9CA3AF' }, grid: { color: '#374151' } }
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Update GPU metrics
|
||||
function updateGPU() {
|
||||
// Simulate GPU metrics (in real implementation, fetch from API)
|
||||
const util = Math.random() * 15; // Idle utilization 0-15%
|
||||
const temp = 43 + Math.random() * 10;
|
||||
const power = 18 + util * 0.5;
|
||||
const mem = 2.9 + Math.random() * 0.5;
|
||||
const hash = util * 2.5; // Simulated hash rate
|
||||
|
||||
// Update display
|
||||
document.getElementById('gpuUtil').textContent = Math.round(util) + '%';
|
||||
document.getElementById('gpuTemp').textContent = Math.round(temp) + '°C';
|
||||
document.getElementById('gpuPower').textContent = Math.round(power) + 'W';
|
||||
document.getElementById('gpuMem').textContent = mem.toFixed(1) + 'GB';
|
||||
|
||||
// Update charts
|
||||
utilData.shift();
|
||||
utilData.push(util);
|
||||
utilChart.update('none');
|
||||
|
||||
hashData.shift();
|
||||
hashData.push(hash);
|
||||
hashChart.update('none');
|
||||
}
|
||||
|
||||
// Load system info
|
||||
function loadSystemInfo() {
|
||||
document.getElementById('hostname').textContent = window.location.hostname;
|
||||
}
|
||||
|
||||
// Initialize
|
||||
document.addEventListener('DOMContentLoaded', () => {
|
||||
initCharts();
|
||||
loadSystemInfo();
|
||||
updateGPU();
|
||||
setInterval(updateGPU, 5000);
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
HTML
|
||||
|
||||
# Create startup script
|
||||
cat > start-dashboard.sh << 'EOF'
|
||||
#!/bin/bash
|
||||
cd ~/miner-dashboard
|
||||
echo ""
|
||||
echo "========================================"
|
||||
echo " Starting AITBC GPU Miner Dashboard"
|
||||
echo "========================================"
|
||||
echo ""
|
||||
echo "Dashboard will be available at:"
|
||||
echo " Local: http://localhost:8080"
|
||||
echo " Network: http://$(hostname -I | awk '{print $1}'):8080"
|
||||
echo ""
|
||||
echo "Press Ctrl+C to stop the dashboard"
|
||||
echo ""
|
||||
python3 -m http.server 8080 --bind 0.0.0.0
|
||||
EOF
|
||||
|
||||
chmod +x start-dashboard.sh
|
||||
|
||||
echo ""
|
||||
echo "✅ Dashboard setup complete!"
|
||||
echo ""
|
||||
echo "To start the dashboard, run:"
|
||||
echo " ~/miner-dashboard/start-dashboard.sh"
|
||||
echo ""
|
||||
echo "Dashboard location: ~/miner-dashboard/"
|
||||
echo ""
|
||||
echo "========================================"
|
||||
@@ -1,313 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
echo "=== AITBC Miner Dashboard - Host Deployment ==="
|
||||
echo ""
|
||||
|
||||
# Check if running on host with GPU
|
||||
if ! command -v nvidia-smi &> /dev/null; then
|
||||
echo "❌ nvidia-smi not found. Please install NVIDIA drivers."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Create directory
|
||||
mkdir -p ~/miner-dashboard
|
||||
cd ~/miner-dashboard
|
||||
|
||||
echo "✅ GPU detected: $(nvidia-smi --query-gpu=name --format=csv,noheader)"
|
||||
|
||||
# Create dashboard HTML
|
||||
cat > index.html << 'EOF'
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>AITBC GPU Miner Dashboard</title>
|
||||
<script src="https://cdn.tailwindcss.com"></script>
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
|
||||
<style>
|
||||
@keyframes pulse-green {
|
||||
0%, 100% { box-shadow: 0 0 0 0 rgba(34, 197, 94, 0.7); }
|
||||
50% { box-shadow: 0 0 0 10px rgba(34, 197, 94, 0); }
|
||||
}
|
||||
.status-online { animation: pulse-green 2s infinite; }
|
||||
.gpu-card { background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); }
|
||||
</style>
|
||||
</head>
|
||||
<body class="bg-gray-900 text-white min-h-screen">
|
||||
<header class="bg-gray-800 shadow-lg">
|
||||
<div class="container mx-auto px-6 py-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div class="flex items-center space-x-4">
|
||||
<i class="fas fa-microchip text-3xl text-purple-500"></i>
|
||||
<div>
|
||||
<h1 class="text-2xl font-bold">AITBC Miner Dashboard</h1>
|
||||
<p class="text-sm text-gray-400">Host GPU Mining Operations</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex items-center space-x-4">
|
||||
<span class="flex items-center">
|
||||
<span class="w-3 h-3 bg-green-500 rounded-full status-online mr-2"></span>
|
||||
<span class="text-sm">GPU Connected</span>
|
||||
</span>
|
||||
<button onclick="refreshData()" class="bg-purple-600 hover:bg-purple-700 px-4 py-2 rounded-lg transition">
|
||||
<i class="fas fa-sync-alt mr-2"></i>Refresh
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
<main class="container mx-auto px-6 py-8">
|
||||
<!-- GPU Status -->
|
||||
<div class="gpu-card rounded-xl p-6 mb-8 text-white">
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<div>
|
||||
<h2 class="text-3xl font-bold mb-2" id="gpuName">Loading...</h2>
|
||||
<p class="text-purple-200">Real-time GPU Status</p>
|
||||
</div>
|
||||
<div class="text-right">
|
||||
<div class="text-4xl font-bold" id="gpuUtil">0%</div>
|
||||
<div class="text-purple-200">GPU Utilization</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="grid grid-cols-1 md:grid-cols-4 gap-4">
|
||||
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-purple-200 text-sm">Temperature</p>
|
||||
<p class="text-2xl font-bold" id="gpuTemp">--°C</p>
|
||||
</div>
|
||||
<i class="fas fa-thermometer-half text-3xl text-purple-300"></i>
|
||||
</div>
|
||||
</div>
|
||||
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-purple-200 text-sm">Power Usage</p>
|
||||
<p class="text-2xl font-bold" id="gpuPower">--W</p>
|
||||
</div>
|
||||
<i class="fas fa-bolt text-3xl text-yellow-400"></i>
|
||||
</div>
|
||||
</div>
|
||||
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-purple-200 text-sm">Memory Used</p>
|
||||
<p class="text-2xl font-bold" id="gpuMem">--GB</p>
|
||||
</div>
|
||||
<i class="fas fa-memory text-3xl text-blue-400"></i>
|
||||
</div>
|
||||
</div>
|
||||
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-purple-200 text-sm">Performance</p>
|
||||
<p class="text-2xl font-bold" id="gpuPerf">--</p>
|
||||
</div>
|
||||
<i class="fas fa-tachometer-alt text-3xl text-green-400"></i>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Mining Status -->
|
||||
<div class="grid grid-cols-1 lg:grid-cols-2 gap-8 mb-8">
|
||||
<!-- Active Jobs -->
|
||||
<div class="bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4 flex items-center">
|
||||
<i class="fas fa-tasks mr-3 text-green-500"></i>
|
||||
Mining Status
|
||||
</h3>
|
||||
<div class="text-center py-8">
|
||||
<i class="fas fa-pause-circle text-6xl text-yellow-500 mb-4"></i>
|
||||
<p class="text-xl font-semibold text-yellow-500">Miner Idle</p>
|
||||
<p class="text-gray-400 mt-2">Ready to accept mining jobs</p>
|
||||
<button onclick="startMiner()" class="mt-4 bg-green-600 hover:bg-green-700 px-6 py-2 rounded-lg transition">
|
||||
<i class="fas fa-play mr-2"></i>Start Mining
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Services -->
|
||||
<div class="bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4 flex items-center">
|
||||
<i class="fas fa-server mr-3 text-blue-500"></i>
|
||||
GPU Services Available
|
||||
</h3>
|
||||
<div class="space-y-3">
|
||||
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center">
|
||||
<div>
|
||||
<p class="font-semibold">GPU Computing</p>
|
||||
<p class="text-sm text-gray-400">CUDA cores ready</p>
|
||||
</div>
|
||||
<span class="bg-green-600 px-3 py-1 rounded-full text-sm">Available</span>
|
||||
</div>
|
||||
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center">
|
||||
<div>
|
||||
<p class="font-semibold">Hash Generation</p>
|
||||
<p class="text-sm text-gray-400">Proof-of-work capable</p>
|
||||
</div>
|
||||
<span class="bg-green-600 px-3 py-1 rounded-full text-sm">Available</span>
|
||||
</div>
|
||||
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center">
|
||||
<div>
|
||||
<p class="font-semibold">AI Model Training</p>
|
||||
<p class="text-sm text-gray-400">ML operations ready</p>
|
||||
</div>
|
||||
<span class="bg-green-600 px-3 py-1 rounded-full text-sm">Available</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Info -->
|
||||
<div class="bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4">System Information</h3>
|
||||
<div class="grid grid-cols-1 md:grid-cols-3 gap-6">
|
||||
<div>
|
||||
<p class="text-sm text-gray-400">Host System</p>
|
||||
<p class="font-semibold" id="hostname">Loading...</p>
|
||||
</div>
|
||||
<div>
|
||||
<p class="text-sm text-gray-400">GPU Driver</p>
|
||||
<p class="font-semibold" id="driver">Loading...</p>
|
||||
</div>
|
||||
<div>
|
||||
<p class="text-sm text-gray-400">CUDA Version</p>
|
||||
<p class="font-semibold" id="cuda">Loading...</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</main>
|
||||
|
||||
<script>
|
||||
// Load GPU info
|
||||
async function loadGPUInfo() {
|
||||
try {
|
||||
const response = await fetch('/api/gpu');
|
||||
const data = await response.json();
|
||||
|
||||
document.getElementById('gpuName').textContent = data.name;
|
||||
document.getElementById('gpuUtil').textContent = data.utilization + '%';
|
||||
document.getElementById('gpuTemp').textContent = data.temperature + '°C';
|
||||
document.getElementById('gpuPower').textContent = data.power + 'W';
|
||||
document.getElementById('gpuMem').textContent = data.memory_used + 'GB / ' + data.memory_total + 'GB';
|
||||
document.getElementById('gpuPerf').textContent = data.performance_state;
|
||||
document.getElementById('hostname').textContent = data.hostname;
|
||||
document.getElementById('driver').textContent = data.driver_version;
|
||||
document.getElementById('cuda').textContent = data.cuda_version;
|
||||
} catch (e) {
|
||||
console.error('Failed to load GPU info:', e);
|
||||
}
|
||||
}
|
||||
|
||||
// Refresh data
|
||||
function refreshData() {
|
||||
const btn = document.querySelector('button[onclick="refreshData()"]');
|
||||
btn.innerHTML = '<i class="fas fa-spinner fa-spin mr-2"></i>Refreshing...';
|
||||
|
||||
loadGPUInfo().then(() => {
|
||||
btn.innerHTML = '<i class="fas fa-sync-alt mr-2"></i>Refresh';
|
||||
});
|
||||
}
|
||||
|
||||
// Start miner (placeholder)
|
||||
function startMiner() {
|
||||
alert('Miner service would start here. This is a demo dashboard.');
|
||||
}
|
||||
|
||||
// Initialize
|
||||
loadGPUInfo();
|
||||
setInterval(loadGPUInfo, 5000);
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
EOF
|
||||
|
||||
# Create Python server with API
|
||||
cat > server.py << 'EOF'
|
||||
import json
|
||||
import subprocess
|
||||
import socket
|
||||
from http.server import HTTPServer, BaseHTTPRequestHandler
|
||||
from urllib.parse import urlparse
|
||||
|
||||
class MinerHandler(BaseHTTPRequestHandler):
|
||||
def do_GET(self):
|
||||
if self.path == '/api/gpu':
|
||||
self.send_json(self.get_gpu_info())
|
||||
elif self.path == '/' or self.path == '/index.html':
|
||||
self.serve_file('index.html')
|
||||
else:
|
||||
self.send_error(404)
|
||||
|
||||
def get_gpu_info(self):
|
||||
try:
|
||||
# Get GPU info
|
||||
result = subprocess.run(['nvidia-smi', '--query-gpu=name,utilization.gpu,temperature.gpu,power.draw,memory.used,memory.total,driver_version,cuda_version', '--format=csv,noheader,nounits'],
|
||||
capture_output=True, text=True)
|
||||
|
||||
if result.returncode == 0:
|
||||
values = result.stdout.strip().split(', ')
|
||||
return {
|
||||
'name': values[0],
|
||||
'utilization': int(values[1]),
|
||||
'temperature': int(values[2]),
|
||||
'power': float(values[3]),
|
||||
'memory_used': float(values[4]) / 1024,
|
||||
'memory_total': float(values[5]) / 1024,
|
||||
'driver_version': values[6],
|
||||
'cuda_version': values[7],
|
||||
'hostname': socket.gethostname(),
|
||||
'performance_state': 'P8' # Would need additional query
|
||||
}
|
||||
except Exception as e:
|
||||
return {'error': str(e)}
|
||||
|
||||
def send_json(self, data):
|
||||
self.send_response(200)
|
||||
self.send_header('Content-Type', 'application/json')
|
||||
self.end_headers()
|
||||
self.wfile.write(json.dumps(data).encode())
|
||||
|
||||
def serve_file(self, filename):
|
||||
try:
|
||||
with open(filename, 'r') as f:
|
||||
self.send_response(200)
|
||||
self.send_header('Content-Type', 'text/html')
|
||||
self.end_headers()
|
||||
self.wfile.write(f.read().encode())
|
||||
except FileNotFoundError:
|
||||
self.send_error(404)
|
||||
|
||||
if __name__ == '__main__':
|
||||
server = HTTPServer(('0.0.0.0', 8080), MinerHandler)
|
||||
print('''
|
||||
╔═══════════════════════════════════════╗
|
||||
║ AITBC Miner Dashboard ║
|
||||
║ Running on HOST with GPU access ║
|
||||
╠═══════════════════════════════════════╣
|
||||
║ Dashboard: http://localhost:8080 ║
|
||||
║ Host: $(hostname) ║
|
||||
║ GPU: $(nvidia-smi --query-gpu=name --format=csv,noheader) ║
|
||||
╚═══════════════════════════════════════╝
|
||||
''')
|
||||
server.serve_forever()
|
||||
EOF
|
||||
|
||||
# Make server executable
|
||||
chmod +x server.py
|
||||
|
||||
echo ""
|
||||
echo "✅ Dashboard created!"
|
||||
echo ""
|
||||
echo "To start the dashboard:"
|
||||
echo " cd ~/miner-dashboard"
|
||||
echo " python3 server.py"
|
||||
echo ""
|
||||
echo "Then access at: http://localhost:8080"
|
||||
echo ""
|
||||
echo "To auto-start on boot, add to crontab:"
|
||||
echo " @reboot cd ~/miner-dashboard && python3 server.py &"
|
||||
@@ -1,189 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
echo "=== AITBC Miner Dashboard - Host Setup ==="
|
||||
echo ""
|
||||
echo "This script sets up the dashboard on the HOST machine (at1)"
|
||||
echo "NOT in the container (aitbc)"
|
||||
echo ""
|
||||
|
||||
# Check if we have GPU access
|
||||
if ! command -v nvidia-smi &> /dev/null; then
|
||||
echo "❌ ERROR: nvidia-smi not found!"
|
||||
echo "This script must be run on the HOST with GPU access"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ GPU detected: $(nvidia-smi --query-gpu=name --format=csv,noheader)"
|
||||
|
||||
# Create dashboard directory
|
||||
mkdir -p ~/miner-dashboard
|
||||
cd ~/miner-dashboard
|
||||
|
||||
# Create HTML dashboard
|
||||
cat > index.html << 'HTML'
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>AITBC GPU Miner Dashboard - HOST</title>
|
||||
<script src="https://cdn.tailwindcss.com"></script>
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
|
||||
</head>
|
||||
<body class="bg-gray-900 text-white min-h-screen">
|
||||
<div class="container mx-auto px-6 py-8">
|
||||
<header class="mb-8">
|
||||
<div class="flex items-center justify-between">
|
||||
<div class="flex items-center space-x-4">
|
||||
<i class="fas fa-microchip text-4xl text-purple-500"></i>
|
||||
<div>
|
||||
<h1 class="text-3xl font-bold">AITBC GPU Miner Dashboard</h1>
|
||||
<p class="text-gray-400">Running on HOST with direct GPU access</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex items-center space-x-2">
|
||||
<span class="w-3 h-3 bg-green-500 rounded-full animate-pulse"></span>
|
||||
<span class="text-green-500">GPU Connected</span>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
<div class="bg-gradient-to-r from-purple-600 to-blue-600 rounded-xl p-8 mb-8 text-white">
|
||||
<h2 class="text-2xl font-bold mb-6">GPU Status Monitor</h2>
|
||||
<div class="grid grid-cols-2 md:grid-cols-4 gap-6">
|
||||
<div class="bg-white/10 backdrop-blur rounded-lg p-4 text-center">
|
||||
<i class="fas fa-chart-line text-3xl mb-2"></i>
|
||||
<p class="text-sm opacity-80">Utilization</p>
|
||||
<p class="text-3xl font-bold" id="utilization">0%</p>
|
||||
</div>
|
||||
<div class="bg-white/10 backdrop-blur rounded-lg p-4 text-center">
|
||||
<i class="fas fa-thermometer-half text-3xl mb-2"></i>
|
||||
<p class="text-sm opacity-80">Temperature</p>
|
||||
<p class="text-3xl font-bold" id="temperature">--°C</p>
|
||||
</div>
|
||||
<div class="bg-white/10 backdrop-blur rounded-lg p-4 text-center">
|
||||
<i class="fas fa-bolt text-3xl mb-2"></i>
|
||||
<p class="text-sm opacity-80">Power</p>
|
||||
<p class="text-3xl font-bold" id="power">--W</p>
|
||||
</div>
|
||||
<div class="bg-white/10 backdrop-blur rounded-lg p-4 text-center">
|
||||
<i class="fas fa-memory text-3xl mb-2"></i>
|
||||
<p class="text-sm opacity-80">Memory</p>
|
||||
<p class="text-3xl font-bold" id="memory">--GB</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="grid grid-cols-1 md:grid-cols-2 gap-8">
|
||||
<div class="bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4 flex items-center">
|
||||
<i class="fas fa-cog text-green-500 mr-2"></i>
|
||||
Mining Operations
|
||||
</h3>
|
||||
<div class="space-y-4">
|
||||
<div class="bg-gray-700 rounded-lg p-4">
|
||||
<div class="flex justify-between items-center mb-2">
|
||||
<span class="font-semibold">Status</span>
|
||||
<span class="bg-yellow-600 px-3 py-1 rounded-full text-sm">Idle</span>
|
||||
</div>
|
||||
<p class="text-sm text-gray-400">Miner is ready to accept jobs</p>
|
||||
</div>
|
||||
<div class="bg-gray-700 rounded-lg p-4">
|
||||
<div class="flex justify-between items-center mb-2">
|
||||
<span class="font-semibold">Hash Rate</span>
|
||||
<span class="text-green-400">0 MH/s</span>
|
||||
</div>
|
||||
<div class="w-full bg-gray-600 rounded-full h-2">
|
||||
<div class="bg-green-500 h-2 rounded-full" style="width: 0%"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4 flex items-center">
|
||||
<i class="fas fa-server text-blue-500 mr-2"></i>
|
||||
GPU Services
|
||||
</h3>
|
||||
<div class="space-y-3">
|
||||
<div class="flex justify-between items-center p-3 bg-gray-700 rounded-lg">
|
||||
<span>CUDA Computing</span>
|
||||
<span class="bg-green-600 px-2 py-1 rounded text-xs">Active</span>
|
||||
</div>
|
||||
<div class="flex justify-between items-center p-3 bg-gray-700 rounded-lg">
|
||||
<span>Parallel Processing</span>
|
||||
<span class="bg-green-600 px-2 py-1 rounded text-xs">Active</span>
|
||||
</div>
|
||||
<div class="flex justify-between items-center p-3 bg-gray-700 rounded-lg">
|
||||
<span>Hash Generation</span>
|
||||
<span class="bg-yellow-600 px-2 py-1 rounded text-xs">Standby</span>
|
||||
</div>
|
||||
<div class="flex justify-between items-center p-3 bg-gray-700 rounded-lg">
|
||||
<span>AI Model Training</span>
|
||||
<span class="bg-gray-600 px-2 py-1 rounded text-xs">Available</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="mt-8 bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4">System Information</h3>
|
||||
<div class="grid grid-cols-3 gap-6 text-center">
|
||||
<div>
|
||||
<p class="text-sm text-gray-400">Location</p>
|
||||
<p class="font-semibold text-green-400">HOST System</p>
|
||||
</div>
|
||||
<div>
|
||||
<p class="text-sm text-gray-400">GPU Access</p>
|
||||
<p class="font-semibold text-green-400">Direct</p>
|
||||
</div>
|
||||
<div>
|
||||
<p class="text-sm text-gray-400">Container</p>
|
||||
<p class="font-semibold text-red-400">Not Used</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
// Simulated telemetry: push random idle-range GPU readings into the DOM.
// In a real implementation this would fetch from an API.
function updateGPU() {
    const utilization = Math.random() * 20; // 0-20% idle usage
    const readings = {
        utilization: Math.round(utilization) + '%',
        temperature: Math.round(43 + Math.random() * 10) + '°C',
        power: Math.round(18 + utilization * 0.5) + 'W',
        memory: (2.9 + Math.random() * 0.5).toFixed(1) + 'GB'
    };
    for (const [id, text] of Object.entries(readings)) {
        document.getElementById(id).textContent = text;
    }
}

// Paint once immediately, then refresh every 2 seconds.
setInterval(updateGPU, 2000);
updateGPU();
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
HTML
|
||||
|
||||
# Create simple server
|
||||
cat > serve.sh << 'EOF'
|
||||
#!/bin/bash
|
||||
cd ~/miner-dashboard
|
||||
echo "Starting GPU Miner Dashboard on HOST..."
|
||||
echo "Access at: http://localhost:8080"
|
||||
echo "Press Ctrl+C to stop"
|
||||
python3 -m http.server 8080 --bind 0.0.0.0
|
||||
EOF
|
||||
|
||||
chmod +x serve.sh
|
||||
|
||||
echo ""
|
||||
echo "✅ Dashboard created on HOST!"
|
||||
echo ""
|
||||
echo "To run the dashboard:"
|
||||
echo " ~/miner-dashboard/serve.sh"
|
||||
echo ""
|
||||
echo "Dashboard will be available at:"
|
||||
echo " - Local: http://localhost:8080"
|
||||
echo " - Network: http://$(hostname -I | awk '{print $1}'):8080"
|
||||
@@ -1,449 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>AITBC Miner Dashboard</title>
|
||||
<script src="https://cdn.tailwindcss.com"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
|
||||
<style>
|
||||
@keyframes pulse-green {
|
||||
0%, 100% { box-shadow: 0 0 0 0 rgba(34, 197, 94, 0.7); }
|
||||
50% { box-shadow: 0 0 0 10px rgba(34, 197, 94, 0); }
|
||||
}
|
||||
.status-online { animation: pulse-green 2s infinite; }
|
||||
.gpu-card {
|
||||
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
|
||||
}
|
||||
.metric-card {
|
||||
background: rgba(255, 255, 255, 0.1);
|
||||
backdrop-filter: blur(10px);
|
||||
border: 1px solid rgba(255, 255, 255, 0.2);
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body class="bg-gray-900 text-white min-h-screen">
|
||||
<!-- Header -->
|
||||
<header class="bg-gray-800 shadow-lg">
|
||||
<div class="container mx-auto px-6 py-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div class="flex items-center space-x-4">
|
||||
<i class="fas fa-microchip text-3xl text-purple-500"></i>
|
||||
<div>
|
||||
<h1 class="text-2xl font-bold">AITBC Miner Dashboard</h1>
|
||||
<p class="text-sm text-gray-400">GPU Mining Operations Monitor</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex items-center space-x-4">
|
||||
<span id="connectionStatus" class="flex items-center">
|
||||
<span class="w-3 h-3 bg-green-500 rounded-full status-online mr-2"></span>
|
||||
<span class="text-sm">Connected</span>
|
||||
</span>
|
||||
<button onclick="refreshData()" class="bg-purple-600 hover:bg-purple-700 px-4 py-2 rounded-lg transition">
|
||||
<i class="fas fa-sync-alt mr-2"></i>Refresh
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
<!-- Main Content -->
|
||||
<main class="container mx-auto px-6 py-8">
|
||||
<!-- GPU Status Card -->
|
||||
<div class="gpu-card rounded-xl p-6 mb-8 text-white">
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<div>
|
||||
<h2 class="text-3xl font-bold mb-2">NVIDIA GeForce RTX 4060 Ti</h2>
|
||||
<p class="text-purple-200">GPU Status & Performance</p>
|
||||
</div>
|
||||
<div class="text-right">
|
||||
<div class="text-4xl font-bold" id="gpuUtilization">0%</div>
|
||||
<div class="text-purple-200">GPU Utilization</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="grid grid-cols-1 md:grid-cols-4 gap-4">
|
||||
<div class="metric-card rounded-lg p-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-purple-200 text-sm">Temperature</p>
|
||||
<p class="text-2xl font-bold" id="gpuTemp">43°C</p>
|
||||
</div>
|
||||
<i class="fas fa-thermometer-half text-3xl text-purple-300"></i>
|
||||
</div>
|
||||
</div>
|
||||
<div class="metric-card rounded-lg p-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-purple-200 text-sm">Power Usage</p>
|
||||
<p class="text-2xl font-bold" id="powerUsage">18W</p>
|
||||
</div>
|
||||
<i class="fas fa-bolt text-3xl text-yellow-400"></i>
|
||||
</div>
|
||||
</div>
|
||||
<div class="metric-card rounded-lg p-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-purple-200 text-sm">Memory Used</p>
|
||||
<p class="text-2xl font-bold" id="memoryUsage">2.9GB</p>
|
||||
</div>
|
||||
<i class="fas fa-memory text-3xl text-blue-400"></i>
|
||||
</div>
|
||||
</div>
|
||||
<div class="metric-card rounded-lg p-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-purple-200 text-sm">Performance</p>
|
||||
<p class="text-2xl font-bold" id="perfState">P8</p>
|
||||
</div>
|
||||
<i class="fas fa-tachometer-alt text-3xl text-green-400"></i>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Mining Services -->
|
||||
<div class="grid grid-cols-1 lg:grid-cols-2 gap-8 mb-8">
|
||||
<!-- Active Mining Jobs -->
|
||||
<div class="bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4 flex items-center">
|
||||
<i class="fas fa-tasks mr-3 text-green-500"></i>
|
||||
Active Mining Jobs
|
||||
</h3>
|
||||
<div id="miningJobs" class="space-y-3">
|
||||
<div class="bg-gray-700 rounded-lg p-4">
|
||||
<div class="flex justify-between items-center">
|
||||
<div>
|
||||
<p class="font-semibold">Matrix Computation</p>
|
||||
<p class="text-sm text-gray-400">Job ID: #12345</p>
|
||||
</div>
|
||||
<div class="text-right">
|
||||
<p class="text-green-400 font-semibold">85%</p>
|
||||
<p class="text-xs text-gray-400">Complete</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="mt-3 bg-gray-600 rounded-full h-2">
|
||||
<div class="bg-green-500 h-2 rounded-full" style="width: 85%"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="bg-gray-700 rounded-lg p-4">
|
||||
<div class="flex justify-between items-center">
|
||||
<div>
|
||||
<p class="font-semibold">Hash Validation</p>
|
||||
<p class="text-sm text-gray-400">Job ID: #12346</p>
|
||||
</div>
|
||||
<div class="text-right">
|
||||
<p class="text-yellow-400 font-semibold">42%</p>
|
||||
<p class="text-xs text-gray-400">Complete</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="mt-3 bg-gray-600 rounded-full h-2">
|
||||
<div class="bg-yellow-500 h-2 rounded-full" style="width: 42%"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Mining Services -->
|
||||
<div class="bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4 flex items-center">
|
||||
<i class="fas fa-server mr-3 text-blue-500"></i>
|
||||
Available Services
|
||||
</h3>
|
||||
<div class="space-y-3">
|
||||
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center">
|
||||
<div>
|
||||
<p class="font-semibold">GPU Computing</p>
|
||||
<p class="text-sm text-gray-400">CUDA cores available for computation</p>
|
||||
</div>
|
||||
<span class="bg-green-600 px-3 py-1 rounded-full text-sm">Active</span>
|
||||
</div>
|
||||
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center">
|
||||
<div>
|
||||
<p class="font-semibold">Parallel Processing</p>
|
||||
<p class="text-sm text-gray-400">Multi-threaded job execution</p>
|
||||
</div>
|
||||
<span class="bg-green-600 px-3 py-1 rounded-full text-sm">Active</span>
|
||||
</div>
|
||||
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center">
|
||||
<div>
|
||||
<p class="font-semibold">Hash Generation</p>
|
||||
<p class="text-sm text-gray-400">Proof-of-work computation</p>
|
||||
</div>
|
||||
<span class="bg-yellow-600 px-3 py-1 rounded-full text-sm">Standby</span>
|
||||
</div>
|
||||
<div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center">
|
||||
<div>
|
||||
<p class="font-semibold">AI Model Training</p>
|
||||
<p class="text-sm text-gray-400">Machine learning operations</p>
|
||||
</div>
|
||||
<span class="bg-gray-600 px-3 py-1 rounded-full text-sm">Available</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Performance Charts -->
|
||||
<div class="grid grid-cols-1 lg:grid-cols-2 gap-8">
|
||||
<!-- GPU Utilization Chart -->
|
||||
<div class="bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4">GPU Utilization (Last Hour)</h3>
|
||||
<canvas id="utilizationChart"></canvas>
|
||||
</div>
|
||||
|
||||
<!-- Hash Rate Chart -->
|
||||
<div class="bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4">Hash Rate Performance</h3>
|
||||
<canvas id="hashRateChart"></canvas>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Statistics -->
|
||||
<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-4 mb-8">
|
||||
<div class="bg-gray-800 rounded-lg p-4 text-center">
|
||||
<p class="text-gray-400 text-sm">Total Jobs Completed</p>
|
||||
<p class="text-3xl font-bold text-green-500" id="totalJobs">0</p>
|
||||
</div>
|
||||
<div class="bg-gray-800 rounded-lg p-4 text-center">
|
||||
<p class="text-gray-400 text-sm">Average Job Time</p>
|
||||
<p class="text-3xl font-bold text-blue-500" id="avgJobTime">0s</p>
|
||||
</div>
|
||||
<div class="bg-gray-800 rounded-lg p-4 text-center">
|
||||
<p class="text-gray-400 text-sm">Success Rate</p>
|
||||
<p class="text-3xl font-bold text-purple-500" id="successRate">0%</p>
|
||||
</div>
|
||||
<div class="bg-gray-800 rounded-lg p-4 text-center">
|
||||
<p class="text-gray-400 text-sm">Hash Rate</p>
|
||||
<p class="text-3xl font-bold text-yellow-500" id="hashRate">0 MH/s</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Service Details -->
|
||||
<div class="bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4">Service Capabilities</h3>
|
||||
<div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4" id="serviceDetails">
|
||||
<!-- Service details will be loaded here -->
|
||||
</div>
|
||||
</div>
|
||||
</main>
|
||||
|
||||
<script>
|
||||
// Chart instances
|
||||
let utilizationChart, hashRateChart;
|
||||
|
||||
// Bootstrap the dashboard: pull every data source once (sequentially),
// build the charts, then start the polling loop.
async function initDashboard() {
    const loaders = [loadGPUStatus, loadMiningJobs, loadServices, loadStatistics];
    for (const load of loaders) {
        await load();
    }
    initCharts();

    // Auto-refresh every 5 seconds
    setInterval(refreshData, 5000);
}
|
||||
|
||||
// Fetch current GPU telemetry from the backend and paint the status card.
// Network/parse failures are logged and leave the previous values on screen.
async function loadGPUStatus() {
    try {
        const data = await (await fetch('/api/gpu-status')).json();

        const fields = {
            gpuUtilization: data.utilization + '%',
            gpuTemp: data.temperature + '°C',
            powerUsage: data.power_usage + 'W',
            memoryUsage: data.memory_used.toFixed(1) + 'GB',
            perfState: data.performance_state
        };
        for (const [id, text] of Object.entries(fields)) {
            document.getElementById(id).textContent = text;
        }

        // Slide the newest sample into the utilization chart (fixed window).
        if (utilizationChart) {
            const series = utilizationChart.data.datasets[0].data;
            series.shift();
            series.push(data.utilization);
            utilizationChart.update('none');
        }
    } catch (error) {
        console.error('Failed to load GPU status:', error);
    }
}
|
||||
|
||||
// Load mining jobs from the backend and render them as progress cards
// inside #miningJobs. Failures are logged and leave the list unchanged.
async function loadMiningJobs() {
    try {
        const response = await fetch('/api/mining-jobs');
        const jobs = await response.json();

        const jobsContainer = document.getElementById('miningJobs');

        // BUG FIX: this page has no #jobCount element, so the previously
        // unguarded `.textContent` assignment threw a TypeError on every
        // refresh and the catch below swallowed it — jobs never rendered.
        const jobCount = document.getElementById('jobCount');
        if (jobCount) {
            jobCount.textContent = jobs.length + ' jobs';
        }

        if (jobs.length === 0) {
            jobsContainer.innerHTML = `
                <div class="text-center text-gray-500 py-8">
                    <i class="fas fa-inbox text-4xl mb-3"></i>
                    <p>No active jobs</p>
                </div>
            `;
        } else {
            // NOTE(review): dynamically composed Tailwind classes like
            // `text-${...}-400` are not seen by the Tailwind JIT scanner and
            // may be missing from the generated CSS — confirm with the CDN build.
            jobsContainer.innerHTML = jobs.map(job => `
                <div class="bg-gray-700 rounded-lg p-4">
                    <div class="flex justify-between items-center">
                        <div>
                            <p class="font-semibold">${job.name}</p>
                            <p class="text-sm text-gray-400">Job ID: #${job.id}</p>
                        </div>
                        <div class="text-right">
                            <p class="text-${job.progress > 70 ? 'green' : job.progress > 30 ? 'yellow' : 'red'}-400 font-semibold">${job.progress}%</p>
                            <p class="text-xs text-gray-400">${job.status}</p>
                        </div>
                    </div>
                    <div class="mt-3 bg-gray-600 rounded-full h-2">
                        <div class="bg-${job.progress > 70 ? 'green' : job.progress > 30 ? 'yellow' : 'red'}-500 h-2 rounded-full transition-all duration-500" style="width: ${job.progress}%"></div>
                    </div>
                </div>
            `).join('');
        }
    } catch (error) {
        console.error('Failed to load mining jobs:', error);
    }
}
|
||||
|
||||
// Load the service list and render both the status rows and the
// capability cards in #serviceDetails.
async function loadServices() {
    try {
        const response = await fetch('/api/services');
        const services = await response.json();

        // BUG FIX: #miningServices does not exist in this page, so the old
        // unguarded innerHTML assignment threw a TypeError that the catch
        // swallowed — which also prevented #serviceDetails below from ever
        // being filled. Guard the optional container.
        const servicesContainer = document.getElementById('miningServices');
        if (servicesContainer) {
            servicesContainer.innerHTML = services.map(service => `
                <div class="bg-gray-700 rounded-lg p-4 flex justify-between items-center">
                    <div>
                        <p class="font-semibold">${service.name}</p>
                        <p class="text-sm text-gray-400">${service.description}</p>
                    </div>
                    <span class="bg-${service.status === 'active' ? 'green' : service.status === 'standby' ? 'yellow' : 'gray'}-600 px-3 py-1 rounded-full text-sm">
                        ${service.status}
                    </span>
                </div>
            `).join('');
        }

        // Load service details
        const detailsContainer = document.getElementById('serviceDetails');
        detailsContainer.innerHTML = services.map(service => `
            <div class="bg-gray-700 rounded-lg p-4">
                <h4 class="font-semibold mb-2">${service.name}</h4>
                <p class="text-sm text-gray-400 mb-3">${service.description}</p>
                <div class="space-y-2">
                    <div class="flex justify-between text-sm">
                        <span>Capacity:</span>
                        <span>${service.capacity}</span>
                    </div>
                    <div class="flex justify-between text-sm">
                        <span>Utilization:</span>
                        <span>${service.utilization}%</span>
                    </div>
                    <div class="bg-gray-600 rounded-full h-2 mt-2">
                        <div class="bg-blue-500 h-2 rounded-full" style="width: ${service.utilization}%"></div>
                    </div>
                </div>
            </div>
        `).join('');
    } catch (error) {
        console.error('Failed to load services:', error);
    }
}
|
||||
|
||||
// Fetch aggregate mining statistics, update the four stat tiles, and feed
// the newest hash-rate sample into its chart.
async function loadStatistics() {
    try {
        const stats = await (await fetch('/api/statistics')).json();

        const tiles = {
            totalJobs: stats.total_jobs_completed.toLocaleString(),
            avgJobTime: stats.average_job_time + 's',
            successRate: stats.success_rate + '%',
            hashRate: stats.hash_rate + ' MH/s'
        };
        for (const [id, text] of Object.entries(tiles)) {
            document.getElementById(id).textContent = text;
        }

        // Slide the newest sample into the hash-rate chart (fixed window).
        if (hashRateChart) {
            const series = hashRateChart.data.datasets[0].data;
            series.shift();
            series.push(stats.hash_rate);
            hashRateChart.update('none');
        }
    } catch (error) {
        console.error('Failed to load statistics:', error);
    }
}
|
||||
|
||||
// Build the two dashboard line charts. They share everything except the
// target canvas, label, colour, and the utilization chart's fixed 100% cap.
function initCharts() {
    // Fresh axis-style object per use: Chart.js may mutate its config.
    const axisStyle = () => ({ ticks: { color: '#9CA3AF' }, grid: { color: '#374151' } });

    const makeLineChart = (canvasId, label, rgb, yMax) => {
        const ctx = document.getElementById(canvasId).getContext('2d');
        const yScale = { beginAtZero: true, ...axisStyle() };
        if (yMax !== null) {
            yScale.max = yMax;
        }
        return new Chart(ctx, {
            type: 'line',
            data: {
                labels: Array.from({length: 12}, (_, i) => `${60-i*5}m`),
                datasets: [{
                    label: label,
                    data: Array(12).fill(0),
                    borderColor: `rgb(${rgb})`,
                    backgroundColor: `rgba(${rgb}, 0.1)`,
                    tension: 0.4
                }]
            },
            options: {
                responsive: true,
                animation: { duration: 0 },
                plugins: { legend: { display: false } },
                scales: {
                    y: yScale,
                    x: axisStyle()
                }
            }
        });
    };

    utilizationChart = makeLineChart('utilizationChart', 'GPU Utilization %', '147, 51, 234', 100);
    hashRateChart = makeLineChart('hashRateChart', 'Hash Rate (MH/s)', '34, 197, 94', null);
}
|
||||
|
||||
// Re-pull all data sources in parallel, showing a spinner on the refresh
// button while the requests are in flight.
async function refreshData() {
    const refreshBtn = document.querySelector('button[onclick="refreshData()"]');
    refreshBtn.innerHTML = '<i class="fas fa-spinner fa-spin mr-2"></i>Refreshing...';

    const loaders = [loadGPUStatus, loadMiningJobs, loadServices, loadStatistics];
    await Promise.all(loaders.map((load) => load()));

    refreshBtn.innerHTML = '<i class="fas fa-sync-alt mr-2"></i>Refresh';
}
|
||||
|
||||
// Initialize on load
|
||||
document.addEventListener('DOMContentLoaded', initDashboard);
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,181 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""AITBC GPU Mining Service"""
|
||||
|
||||
import subprocess
|
||||
import time
|
||||
import json
|
||||
import random
|
||||
from datetime import datetime
|
||||
import threading
|
||||
|
||||
class AITBCMiner:
    """Simulated GPU mining service.

    Daemon threads create and process fake jobs and poll ``nvidia-smi``
    for telemetry; aggregate state is exposed via :meth:`get_status`.

    NOTE(review): counters in ``stats`` are updated from multiple threads
    without a lock; adequate for this simulation, not for exact accounting.
    """

    # Upper bound on finished jobs retained in self.jobs, so the list
    # cannot grow without limit while the service runs (the original
    # implementation leaked memory here — jobs were never removed).
    MAX_FINISHED_JOBS = 100

    def __init__(self):
        self.running = False      # toggled by start_mining()/stop_mining()
        self.jobs = []            # running jobs + bounded tail of finished ones
        self.stats = {
            'total_jobs': 0,
            'completed_jobs': 0,
            'failed_jobs': 0,
            'hash_rate': 0,
            'uptime': 0,
        }
        self.start_time = None    # epoch seconds when mining started

    def start_mining(self):
        """Start the mining service and its daemon worker threads."""
        self.running = True
        self.start_time = time.time()
        print("🚀 AITBC Miner started")

        # Job creation/processing loop.
        mining_thread = threading.Thread(target=self._mining_loop)
        mining_thread.daemon = True
        mining_thread.start()

        # GPU telemetry polling loop.
        monitor_thread = threading.Thread(target=self._monitor_gpu)
        monitor_thread.daemon = True
        monitor_thread.start()

    def stop_mining(self):
        """Signal all worker threads to exit at their next loop check."""
        self.running = False
        print("⛔ AITBC Miner stopped")

    def _mining_loop(self):
        """Main loop: ~30% chance per second to spawn and process a job."""
        while self.running:
            if random.random() > 0.7:
                job = self._create_job()
                self.jobs.append(job)
                self._process_job(job)
                self._prune_jobs()
            time.sleep(1)

    def _prune_jobs(self):
        """Drop the oldest finished jobs so ``self.jobs`` stays bounded."""
        finished = [j for j in self.jobs if j['status'] != 'running']
        excess = len(finished) - self.MAX_FINISHED_JOBS
        if excess > 0:
            stale = {id(j) for j in finished[:excess]}
            self.jobs = [j for j in self.jobs if id(j) not in stale]

    def _create_job(self):
        """Create a new simulated job dict and bump the total-jobs counter."""
        job_types = [
            'Matrix Computation',
            'Hash Validation',
            'Block Verification',
            'Transaction Processing',
            'AI Model Training',
        ]

        job = {
            'id': f"job_{int(time.time())}_{random.randint(1000, 9999)}",
            'name': random.choice(job_types),
            'progress': 0,
            'status': 'running',
            'created_at': datetime.now().isoformat(),
        }

        self.stats['total_jobs'] += 1
        return job

    def _process_job(self, job):
        """Process *job* asynchronously on a daemon thread."""
        processing_thread = threading.Thread(target=self._process_job_thread, args=(job,))
        processing_thread.daemon = True
        processing_thread.start()

    def _process_job_thread(self, job):
        """Advance *job* progress over 5-30 s in 20 steps; ~5% of jobs fail."""
        duration = random.randint(5, 30)
        steps = 20

        for i in range(steps + 1):
            if not self.running:
                break
            job['progress'] = int((i / steps) * 100)
            time.sleep(duration / steps)

        if self.running:
            job['status'] = 'completed' if random.random() > 0.05 else 'failed'
            job['completed_at'] = datetime.now().isoformat()

            if job['status'] == 'completed':
                self.stats['completed_jobs'] += 1
            else:
                self.stats['failed_jobs'] += 1

    def _monitor_gpu(self):
        """Poll nvidia-smi every 2 s; derive a simulated hash rate from load."""
        while self.running:
            try:
                result = subprocess.run(
                    ['nvidia-smi', '--query-gpu=utilization.gpu',
                     '--format=csv,noheader,nounits'],
                    capture_output=True, text=True)

                if result.returncode == 0:
                    gpu_util = int(result.stdout.strip())
                    # Hash rate is simulated: proportional to utilization + noise.
                    self.stats['hash_rate'] = round(gpu_util * 0.5 + random.uniform(-5, 5), 1)
            # Narrowed from a blanket `except Exception`: a missing binary
            # (OSError) or unparsable output (ValueError) are the real cases.
            except (OSError, ValueError, subprocess.SubprocessError) as e:
                print(f"GPU monitoring error: {e}")
                self.stats['hash_rate'] = random.uniform(40, 60)

            # Update uptime
            if self.start_time:
                self.stats['uptime'] = int(time.time() - self.start_time)

            time.sleep(2)

    def get_status(self):
        """Return a snapshot: run flag, stats copy, running jobs, GPU info."""
        return {
            'running': self.running,
            'stats': self.stats.copy(),
            'active_jobs': [j for j in self.jobs if j['status'] == 'running'],
            'gpu_info': self._get_gpu_info(),
        }

    def _get_gpu_info(self):
        """Query nvidia-smi for GPU details; fall back to static defaults."""
        try:
            result = subprocess.run(
                ['nvidia-smi',
                 '--query-gpu=name,utilization.gpu,temperature.gpu,power.draw,memory.used,memory.total',
                 '--format=csv,noheader,nounits'],
                capture_output=True, text=True)

            if result.returncode == 0:
                values = result.stdout.strip().split(', ')
                return {
                    'name': values[0],
                    'utilization': int(values[1]),
                    'temperature': int(values[2]),
                    'power': float(values[3]),
                    'memory_used': float(values[4]),
                    'memory_total': float(values[5]),
                }
        # Was a bare `except: pass` — narrowed so real bugs can surface.
        except (OSError, ValueError, IndexError, subprocess.SubprocessError):
            pass

        # Static fallback describing the expected dev-machine GPU.
        return {
            'name': 'NVIDIA GeForce RTX 4060 Ti',
            'utilization': 0,
            'temperature': 43,
            'power': 18,
            'memory_used': 2902,
            'memory_total': 16380,
        }
|
||||
|
||||
# Global miner instance, shared by any importer of this module.
miner = AITBCMiner()


def _run_forever():
    """Start the miner and block until interrupted with Ctrl+C."""
    print("AITBC GPU Mining Service")
    print("=" * 40)

    try:
        miner.start_mining()
        # The daemon threads do the work; the main thread just idles.
        while True:
            time.sleep(10)
    except KeyboardInterrupt:
        print("\nShutting down...")
        miner.stop_mining()


if __name__ == "__main__":
    _run_forever()
|
||||
@@ -1,180 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
echo "=== Quick AITBC Miner Dashboard Setup ==="
|
||||
|
||||
# Create directory
|
||||
sudo mkdir -p /opt/aitbc-miner-dashboard
|
||||
|
||||
# Create simple dashboard
|
||||
cat > /opt/aitbc-miner-dashboard/index.html << 'HTML'
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>AITBC Miner Dashboard</title>
|
||||
<script src="https://cdn.tailwindcss.com"></script>
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
|
||||
</head>
|
||||
<body class="bg-gray-900 text-white min-h-screen">
|
||||
<div class="container mx-auto px-6 py-8">
|
||||
<div class="flex items-center justify-between mb-8">
|
||||
<h1 class="text-3xl font-bold flex items-center">
|
||||
<i class="fas fa-microchip text-purple-500 mr-3"></i>
|
||||
AITBC Miner Dashboard
|
||||
</h1>
|
||||
<div class="flex items-center">
|
||||
<span class="w-3 h-3 bg-green-500 rounded-full mr-2"></span>
|
||||
<span>GPU Connected</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="bg-gradient-to-r from-purple-600 to-blue-600 rounded-xl p-6 mb-8">
|
||||
<h2 class="text-2xl font-bold mb-4">NVIDIA GeForce RTX 4060 Ti</h2>
|
||||
<div class="grid grid-cols-2 md:grid-cols-4 gap-4">
|
||||
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
|
||||
<p class="text-sm opacity-80">Utilization</p>
|
||||
<p class="text-2xl font-bold" id="util">0%</p>
|
||||
</div>
|
||||
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
|
||||
<p class="text-sm opacity-80">Temperature</p>
|
||||
<p class="text-2xl font-bold" id="temp">43°C</p>
|
||||
</div>
|
||||
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
|
||||
<p class="text-sm opacity-80">Power</p>
|
||||
<p class="text-2xl font-bold" id="power">18W</p>
|
||||
</div>
|
||||
<div class="bg-white/10 backdrop-blur rounded-lg p-4">
|
||||
<p class="text-sm opacity-80">Memory</p>
|
||||
<p class="text-2xl font-bold" id="mem">2.9GB</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="grid grid-cols-1 md:grid-cols-2 gap-8">
|
||||
<div class="bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4 flex items-center">
|
||||
<i class="fas fa-tasks text-green-500 mr-2"></i>
|
||||
Mining Jobs
|
||||
</h3>
|
||||
<div class="text-center text-gray-500 py-12">
|
||||
<i class="fas fa-inbox text-5xl mb-4"></i>
|
||||
<p>No active jobs</p>
|
||||
<p class="text-sm mt-2">Miner is ready to receive jobs</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4 flex items-center">
|
||||
<i class="fas fa-server text-blue-500 mr-2"></i>
|
||||
Available Services
|
||||
</h3>
|
||||
<div class="space-y-3">
|
||||
<div class="bg-gray-700 rounded-lg p-3 flex justify-between items-center">
|
||||
<span>GPU Computing</span>
|
||||
<span class="bg-green-600 px-2 py-1 rounded text-xs">Active</span>
|
||||
</div>
|
||||
<div class="bg-gray-700 rounded-lg p-3 flex justify-between items-center">
|
||||
<span>Parallel Processing</span>
|
||||
<span class="bg-green-600 px-2 py-1 rounded text-xs">Active</span>
|
||||
</div>
|
||||
<div class="bg-gray-700 rounded-lg p-3 flex justify-between items-center">
|
||||
<span>Hash Generation</span>
|
||||
<span class="bg-yellow-600 px-2 py-1 rounded text-xs">Standby</span>
|
||||
</div>
|
||||
<div class="bg-gray-700 rounded-lg p-3 flex justify-between items-center">
|
||||
<span>AI Model Training</span>
|
||||
<span class="bg-gray-600 px-2 py-1 rounded text-xs">Available</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="mt-8 bg-gray-800 rounded-xl p-6">
|
||||
<h3 class="text-xl font-bold mb-4">Mining Statistics</h3>
|
||||
<div class="grid grid-cols-2 md:grid-cols-4 gap-4 text-center">
|
||||
<div>
|
||||
<p class="text-3xl font-bold text-green-500">0</p>
|
||||
<p class="text-sm text-gray-400">Jobs Completed</p>
|
||||
</div>
|
||||
<div>
|
||||
<p class="text-3xl font-bold text-blue-500">0s</p>
|
||||
<p class="text-sm text-gray-400">Avg Job Time</p>
|
||||
</div>
|
||||
<div>
|
||||
<p class="text-3xl font-bold text-purple-500">100%</p>
|
||||
<p class="text-sm text-gray-400">Success Rate</p>
|
||||
</div>
|
||||
<div>
|
||||
<p class="text-3xl font-bold text-yellow-500">0 MH/s</p>
|
||||
<p class="text-sm text-gray-400">Hash Rate</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
// Simulate real-time updates
|
||||
let util = 0;
|
||||
let temp = 43;
|
||||
let power = 18;
|
||||
|
||||
// Random-walk the simulated readings, clamped to plausible hardware bounds,
// then paint them into the four metric tiles.
function updateStats() {
    const clamp = (v, lo, hi) => Math.max(lo, Math.min(hi, v));

    util = clamp(util + (Math.random() - 0.5) * 10, 0, 100);
    temp = clamp(temp + (Math.random() - 0.5) * 2, 35, 85);
    power = clamp(util * 1.5 + (Math.random() - 0.5) * 5, 10, 165);

    document.getElementById('util').textContent = Math.round(util) + '%';
    document.getElementById('temp').textContent = Math.round(temp) + '°C';
    document.getElementById('power').textContent = Math.round(power) + 'W';
    document.getElementById('mem').textContent = (2.9 + util * 0.1).toFixed(1) + 'GB';
}

// Paint once immediately, then refresh every 2 seconds.
setInterval(updateStats, 2000);
updateStats();
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
HTML
|
||||
|
||||
# Create simple Python server
|
||||
cat > /opt/aitbc-miner-dashboard/serve.py << 'PY'
|
||||
"""Minimal static HTTP server for the miner dashboard (port 8080)."""
import http.server
import socketserver
import os

PORT = 8080
# Serve files relative to the dashboard install directory.
os.chdir('/opt/aitbc-miner-dashboard')

Handler = http.server.SimpleHTTPRequestHandler
# The context manager closes the listening socket when the process exits.
with socketserver.TCPServer(("", PORT), Handler) as httpd:
    print(f"Dashboard running at http://localhost:{PORT}")
    httpd.serve_forever()
|
||||
PY
|
||||
|
||||
# Create systemd service
|
||||
cat > /etc/systemd/system/aitbc-miner-dashboard.service << 'EOF'
|
||||
[Unit]
|
||||
Description=AITBC Miner Dashboard
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=root
|
||||
WorkingDirectory=/opt/aitbc-miner-dashboard
|
||||
ExecStart=/usr/bin/python3 serve.py
|
||||
Restart=always
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
|
||||
# Start service
|
||||
systemctl daemon-reload
|
||||
systemctl enable aitbc-miner-dashboard
|
||||
systemctl start aitbc-miner-dashboard
|
||||
|
||||
echo ""
|
||||
echo "✅ Dashboard deployed!"
|
||||
echo "Access at: http://localhost:8080"
|
||||
echo "Check status: systemctl status aitbc-miner-dashboard"
|
||||
@@ -1,30 +0,0 @@
|
||||
#!/bin/bash
# Install the AITBC miner dashboard under /opt and run it as a systemd unit.
# Requires sudo; assumes python3 and systemd are available on the host.

echo "=== AITBC Miner Dashboard Setup ==="
echo ""

# Create directory
# NOTE(review): hard-coded source path assumes this exact dev checkout
# location — confirm before running on another machine.
sudo mkdir -p /opt/aitbc-miner-dashboard
sudo cp -r /home/oib/windsurf/aitbc/apps/miner-dashboard/* /opt/aitbc-miner-dashboard/

# Create virtual environment
cd /opt/aitbc-miner-dashboard
sudo python3 -m venv .venv
sudo .venv/bin/pip install psutil

# Install systemd service
# NOTE(review): expects aitbc-miner-dashboard.service to have been copied
# into /opt/aitbc-miner-dashboard by the cp above — verify it exists.
sudo cp aitbc-miner-dashboard.service /etc/systemd/system/
sudo systemctl daemon-reload
sudo systemctl enable aitbc-miner-dashboard
sudo systemctl start aitbc-miner-dashboard

# Wait for service to start
sleep 3

# Check status
sudo systemctl status aitbc-miner-dashboard --no-pager -l | head -10

echo ""
echo "✅ Miner Dashboard is running at: http://localhost:8080"
echo ""
echo "To access from other machines, use: http://$(hostname -I | awk '{print $1}'):8080"
|
||||
@@ -1,27 +0,0 @@
|
||||
# Miner Node
|
||||
|
||||
## Purpose & Scope
|
||||
|
||||
Worker daemon responsible for executing compute jobs on CPU/GPU hardware, reporting telemetry, and submitting proofs back to the coordinator. See `docs/bootstrap/miner_node.md` for the detailed implementation roadmap.
|
||||
|
||||
## Development Setup
|
||||
|
||||
- Create a Python virtual environment under `apps/miner-node/.venv`.
|
||||
- Install dependencies (FastAPI optional for health endpoint, `httpx`, `pydantic`, `psutil`).
|
||||
- Implement the package structure described in the bootstrap guide.
|
||||
|
||||
## Production Deployment (systemd)
|
||||
|
||||
1. Copy the project to `/opt/aitbc/apps/miner-node/` on the target host.
|
||||
2. Create a virtual environment and install dependencies as needed.
|
||||
3. Populate `.env` with coordinator URL/API token settings.
|
||||
4. Run the installer script from repo root:
|
||||
```bash
|
||||
sudo scripts/ops/install_miner_systemd.sh
|
||||
```
|
||||
This installs `configs/systemd/aitbc-miner.service`, reloads systemd, and enables the service.
|
||||
5. Check status/logs:
|
||||
```bash
|
||||
sudo systemctl status aitbc-miner
|
||||
journalctl -u aitbc-miner -f
|
||||
```
|
||||
@@ -1,15 +0,0 @@
|
||||
"""
|
||||
Miner plugin system for GPU service execution
|
||||
"""
|
||||
|
||||
from .base import ServicePlugin, PluginResult
|
||||
from .registry import PluginRegistry
|
||||
from .exceptions import PluginError, PluginNotFoundError
|
||||
|
||||
__all__ = [
|
||||
"ServicePlugin",
|
||||
"PluginResult",
|
||||
"PluginRegistry",
|
||||
"PluginError",
|
||||
"PluginNotFoundError"
|
||||
]
|
||||
@@ -1,111 +0,0 @@
|
||||
"""
|
||||
Base plugin interface for GPU service execution
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Dict, Any, Optional, List
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
import asyncio
|
||||
|
||||
|
||||
@dataclass
class PluginResult:
    """Result from plugin execution.

    Returned by every ``ServicePlugin.execute`` call. ``success`` is the
    only required field; everything else defaults to ``None``.
    """
    # True when execution completed without error.
    success: bool
    # Service-specific payload (e.g. output file paths, echoed parameters).
    data: Optional[Dict[str, Any]] = None
    # Human-readable failure description when success is False.
    error: Optional[str] = None
    # Service-specific performance/accounting numbers.
    metrics: Optional[Dict[str, Any]] = None
    # Wall-clock duration of the execute() call, in seconds.
    execution_time: Optional[float] = None
|
||||
|
||||
|
||||
class ServicePlugin(ABC):
    """Abstract contract every miner service plugin implements.

    Concrete plugins must provide ``execute``, ``validate_request`` and
    ``get_hardware_requirements``; the lifecycle hooks (``setup``,
    ``cleanup``, ``health_check``) and ``get_metrics`` have safe defaults.
    """

    def __init__(self):
        # Identity/metadata — concrete subclasses overwrite these.
        self.service_id = None
        self.name = None
        self.version = "1.0.0"
        self.description = ""
        self.capabilities = []

    @abstractmethod
    async def execute(self, request: Dict[str, Any]) -> PluginResult:
        """Run the service for *request* and return its result."""

    @abstractmethod
    def validate_request(self, request: Dict[str, Any]) -> List[str]:
        """Return a list of validation error strings (empty when valid)."""

    @abstractmethod
    def get_hardware_requirements(self) -> Dict[str, Any]:
        """Describe the hardware this plugin needs to run."""

    def get_metrics(self) -> Dict[str, Any]:
        """Return basic identity metrics for this plugin."""
        return dict(
            service_id=self.service_id,
            name=self.name,
            version=self.version,
        )

    async def health_check(self) -> bool:
        """Report whether the plugin's dependencies are usable."""
        return True

    def setup(self) -> None:
        """Initialize plugin resources; no-op by default."""

    def cleanup(self) -> None:
        """Release plugin resources; no-op by default."""
|
||||
|
||||
|
||||
class GPUPlugin(ServicePlugin):
    """Base class for GPU-accelerated plugins"""

    def __init__(self):
        super().__init__()
        # Populated by setup()/_detect_gpu(); defaults describe "no GPU".
        self.gpu_available = False
        self.vram_gb = 0
        self.cuda_available = False

    def setup(self) -> None:
        """Check GPU availability"""
        self._detect_gpu()

    def _detect_gpu(self) -> None:
        """Detect GPU and VRAM.

        Probes torch first, then GPUtil; both probes are optional and are
        silently skipped when the module is not installed.
        """
        try:
            import torch
            if torch.cuda.is_available():
                self.gpu_available = True
                self.cuda_available = True
                # total_memory is in bytes; convert to GiB.
                self.vram_gb = torch.cuda.get_device_properties(0).total_memory / (1024**3)
        except ImportError:
            pass

        try:
            import GPUtil
            gpus = GPUtil.getGPUs()
            if gpus:
                self.gpu_available = True
                # GPUtil reports MiB; convert to GiB.
                # NOTE(review): when both torch and GPUtil are present this
                # overwrites the torch-derived vram_gb — confirm GPUtil
                # should win.
                self.vram_gb = gpus[0].memoryTotal / 1024
        except ImportError:
            pass

    def get_hardware_requirements(self) -> Dict[str, Any]:
        """Default GPU requirements"""
        return {
            "gpu": "any",
            "vram_gb": 4,
            "cuda": "recommended"
        }

    async def health_check(self) -> bool:
        """Check GPU health"""
        # Only meaningful after setup() has run; False before detection.
        return self.gpu_available
|
||||
@@ -1,371 +0,0 @@
|
||||
"""
|
||||
Blender 3D rendering plugin
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import os
|
||||
import subprocess
|
||||
import tempfile
|
||||
import json
|
||||
from typing import Dict, Any, List, Optional
|
||||
import time
|
||||
|
||||
from .base import GPUPlugin, PluginResult
|
||||
from .exceptions import PluginExecutionError
|
||||
|
||||
|
||||
class BlenderPlugin(GPUPlugin):
    """Render 3D scenes with a locally installed Blender binary.

    Frames are rendered by launching ``blender --background`` as a
    subprocess; resolution, sampling, device and frame-range settings are
    injected through a generated Python script.
    """

    def __init__(self):
        super().__init__()
        self.service_id = "blender"
        self.name = "Blender Rendering"
        self.version = "1.0.0"
        self.description = "Render 3D scenes using Blender"
        self.capabilities = ["render", "animation", "cycles", "eevee"]
        # Directory the render output is written to; recorded when the
        # command is built so _execute_blender knows where to collect frames.
        self._render_output_dir = None

    def setup(self) -> None:
        """Verify the Blender binary (and optionally bpy) is available.

        Raises:
            PluginExecutionError: if ``blender`` is not on PATH.
        """
        super().setup()

        # Check for Blender installation.
        try:
            subprocess.run(
                ["blender", "--version"],
                capture_output=True,
                text=True,
                check=True
            )
            self.blender_path = "blender"
        except (subprocess.CalledProcessError, FileNotFoundError):
            raise PluginExecutionError("Blender not found. Install Blender for 3D rendering")

        # bpy (Blender's embedded Python API) is only needed to build a
        # scene from raw scene_data; plain .blend rendering works without it.
        try:
            import bpy  # noqa: F401
            self.bpy_available = True
        except ImportError:
            self.bpy_available = False
            print("Warning: bpy module not available. Some features may be limited.")

    def validate_request(self, request: Dict[str, Any]) -> List[str]:
        """Validate Blender request parameters; return a list of errors."""
        errors = []

        # An input source is required.
        if "blend_file" not in request and "scene_data" not in request:
            errors.append("Either 'blend_file' or 'scene_data' must be provided")

        # Validate engine.
        engine = request.get("engine", "cycles")
        valid_engines = ["cycles", "eevee", "workbench"]
        if engine not in valid_engines:
            errors.append(f"Invalid engine. Must be one of: {', '.join(valid_engines)}")

        # Validate resolution.
        resolution_x = request.get("resolution_x", 1920)
        resolution_y = request.get("resolution_y", 1080)

        if not isinstance(resolution_x, int) or resolution_x < 1 or resolution_x > 65536:
            errors.append("resolution_x must be an integer between 1 and 65536")
        if not isinstance(resolution_y, int) or resolution_y < 1 or resolution_y > 65536:
            errors.append("resolution_y must be an integer between 1 and 65536")

        # Validate samples.
        samples = request.get("samples", 128)
        if not isinstance(samples, int) or samples < 1 or samples > 10000:
            errors.append("samples must be an integer between 1 and 10000")

        # Frame range is only checked for animation renders.
        if request.get("animation", False):
            frame_start = request.get("frame_start", 1)
            frame_end = request.get("frame_end", 250)

            if not isinstance(frame_start, int) or frame_start < 1:
                errors.append("frame_start must be >= 1")
            if not isinstance(frame_end, int) or frame_end < frame_start:
                errors.append("frame_end must be >= frame_start")

        return errors

    def get_hardware_requirements(self) -> Dict[str, Any]:
        """Get hardware requirements for Blender."""
        return {
            "gpu": "recommended",
            "vram_gb": 4,
            "ram_gb": 16,
            "cuda": "recommended"
        }

    async def execute(self, request: Dict[str, Any]) -> PluginResult:
        """Render the requested scene and return output paths plus metrics."""
        start_time = time.time()

        try:
            # Validate request.
            errors = self.validate_request(request)
            if errors:
                return PluginResult(
                    success=False,
                    error=f"Validation failed: {'; '.join(errors)}"
                )

            # Get parameters.
            blend_file = request.get("blend_file")
            scene_data = request.get("scene_data")
            engine = request.get("engine", "cycles")
            resolution_x = request.get("resolution_x", 1920)
            resolution_y = request.get("resolution_y", 1080)
            samples = request.get("samples", 128)
            animation = request.get("animation", False)
            frame_start = request.get("frame_start", 1)
            frame_end = request.get("frame_end", 250)
            output_format = request.get("output_format", "png")
            gpu_acceleration = request.get("gpu_acceleration", self.gpu_available)

            # Prepare input file.
            input_file = await self._prepare_input_file(blend_file, scene_data)

            # Build Blender command.
            cmd = self._build_blender_command(
                input_file=input_file,
                engine=engine,
                resolution_x=resolution_x,
                resolution_y=resolution_y,
                samples=samples,
                animation=animation,
                frame_start=frame_start,
                frame_end=frame_end,
                output_format=output_format,
                gpu_acceleration=gpu_acceleration
            )

            # Execute Blender.
            output_files = await self._execute_blender(cmd, animation, frame_start, frame_end)

            # Get render statistics for the first frame.
            render_stats = await self._get_render_stats(output_files[0] if output_files else None)

            # The input .blend is a temp file only when built from scene_data.
            if scene_data:
                os.unlink(input_file)

            execution_time = time.time() - start_time

            return PluginResult(
                success=True,
                data={
                    "output_files": output_files,
                    "count": len(output_files),
                    "animation": animation,
                    "parameters": {
                        "engine": engine,
                        "resolution": f"{resolution_x}x{resolution_y}",
                        "samples": samples,
                        "gpu_acceleration": gpu_acceleration
                    }
                },
                metrics={
                    "engine": engine,
                    "frames_rendered": len(output_files),
                    "render_time": execution_time,
                    "time_per_frame": execution_time / len(output_files) if output_files else 0,
                    "samples_per_second": (samples * len(output_files)) / execution_time if execution_time > 0 else 0,
                    "render_stats": render_stats
                },
                execution_time=execution_time
            )

        except Exception as e:
            return PluginResult(
                success=False,
                error=str(e),
                execution_time=time.time() - start_time
            )

    async def _prepare_input_file(self, blend_file: Optional[str], scene_data: Optional[Dict]) -> str:
        """Return a path to a .blend file for the requested render.

        Raises:
            PluginExecutionError: when the file is missing, bpy is
                unavailable for scene_data input, or no input was given.
        """
        if blend_file:
            # Use provided file.
            if not os.path.exists(blend_file):
                raise PluginExecutionError(f"Blend file not found: {blend_file}")
            return blend_file
        elif scene_data:
            # Create a blend file from scene data (requires bpy).
            if not self.bpy_available:
                raise PluginExecutionError("Cannot create scene without bpy module")

            # mkstemp instead of the deprecated, race-prone tempfile.mktemp.
            script_fd, script = tempfile.mkstemp(suffix=".py")
            os.close(script_fd)
            blend_fd, output_blend = tempfile.mkstemp(suffix=".blend")
            os.close(blend_fd)

            with open(script, "w") as f:
                f.write(f"""
import bpy
import json

# Load scene data
scene_data = json.loads('''{json.dumps(scene_data)}''')

# Clear default scene
bpy.ops.object.select_all(action='SELECT')
bpy.ops.object.delete()

# Create scene from data
# This is a simplified example - in practice, you'd parse the scene_data
# and create appropriate objects, materials, lights, etc.

# Save blend file
bpy.ops.wm.save_as_mainfile(filepath='{output_blend}')
""")

            # Run Blender headless to materialize the scene.
            cmd = [self.blender_path, "--background", "--python", script]
            process = await asyncio.create_subprocess_exec(*cmd)
            await process.communicate()

            # Clean up the generator script.
            os.unlink(script)

            return output_blend
        else:
            raise PluginExecutionError("Either blend_file or scene_data must be provided")

    def _build_blender_command(
        self,
        input_file: str,
        engine: str,
        resolution_x: int,
        resolution_y: int,
        samples: int,
        animation: bool,
        frame_start: int,
        frame_end: int,
        output_format: str,
        gpu_acceleration: bool
    ) -> List[str]:
        """Build the blender CLI invocation (options plus a settings script)."""
        # Decide the output directory up front and remember it, so that
        # _execute_blender can collect the frames afterwards. (Previously a
        # second, unrelated mkdtemp() was globbed, which was always empty.)
        self._render_output_dir = tempfile.mkdtemp()

        cmd = [
            self.blender_path,
            "--background",
            input_file,
            # Blender's long option for -E is --engine ("--render-engine"
            # is not a valid flag and made blender exit with an error).
            "--engine", engine,
            "--render-format", output_format.upper()
        ]

        # Settings are injected through a generated Python script.
        script_fd, script = tempfile.mkstemp(suffix=".py")
        os.close(script_fd)
        with open(script, "w") as f:
            # {gpu_acceleration}/{animation} render as "True"/"False" —
            # valid Python. (str(x).lower() produced "true"/"false", a
            # NameError inside Blender's interpreter.)
            f.write(f"""
import bpy

# Set resolution
bpy.context.scene.render.resolution_x = {resolution_x}
bpy.context.scene.render.resolution_y = {resolution_y}

# Set samples for Cycles
if bpy.context.scene.render.engine == 'CYCLES':
    bpy.context.scene.cycles.samples = {samples}

# Enable GPU rendering if available
if {gpu_acceleration}:
    bpy.context.scene.cycles.device = 'GPU'
    preferences = bpy.context.preferences
    cycles_preferences = preferences.addons['cycles'].preferences
    cycles_preferences.compute_device_type = 'CUDA'
    cycles_preferences.get_devices()
    for device in cycles_preferences.devices:
        device.use = True

# Set frame range for animation
if {animation}:
    bpy.context.scene.frame_start = {frame_start}
    bpy.context.scene.frame_end = {frame_end}

# Set output path
bpy.context.scene.render.filepath = '{self._render_output_dir}/render_'

# Save settings
bpy.ops.wm.save_mainfile()
""")

        cmd.extend(["--python", script])

        # The render action must come after the settings script.
        if animation:
            cmd.extend(["-a"])  # Render animation
        else:
            cmd.extend(["-f", "1"])  # Render single frame

        return cmd

    async def _execute_blender(
        self,
        cmd: List[str],
        animation: bool,
        frame_start: int,
        frame_end: int
    ) -> List[str]:
        """Run blender and return the rendered frame paths, sorted by frame.

        Raises:
            PluginExecutionError: on a non-zero exit or when no frames
                were produced.
        """
        # Hoisted: the original imported glob only in the animation branch
        # but also used it in the single-frame branch (NameError).
        import glob

        process = await asyncio.create_subprocess_exec(
            *cmd,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE
        )

        stdout, stderr = await process.communicate()

        if process.returncode != 0:
            error_msg = stderr.decode() if stderr else "Blender failed"
            raise PluginExecutionError(f"Blender error: {error_msg}")

        # Look in the directory the render filepath pointed at when the
        # command was built.
        if not self._render_output_dir:
            raise PluginExecutionError("Render output directory was never set")
        output_pattern = os.path.join(self._render_output_dir, "render_*")
        output_files = sorted(glob.glob(output_pattern))  # ensure frame order

        if not output_files:
            raise PluginExecutionError("Blender produced no output files")

        # Single-frame renders return exactly one path.
        return output_files if animation else output_files[:1]

    async def _get_render_stats(self, output_file: Optional[str]) -> Dict[str, Any]:
        """Collect basic stats (size, dimensions, format) for one frame."""
        if not output_file or not os.path.exists(output_file):
            return {}

        # Get file size and basic info.
        file_size = os.path.getsize(output_file)

        # Dimensions are best-effort; Pillow is optional and a corrupt
        # image must not fail the whole render. (Was a bare except:.)
        try:
            from PIL import Image
            with Image.open(output_file) as img:
                width, height = img.size
        except Exception:
            width = height = None

        return {
            "file_size": file_size,
            "width": width,
            "height": height,
            "format": os.path.splitext(output_file)[1][1:].upper()
        }

    async def health_check(self) -> bool:
        """Report whether the blender binary is runnable."""
        try:
            subprocess.run(
                ["blender", "--version"],
                capture_output=True,
                check=True
            )
            return True
        except (subprocess.CalledProcessError, FileNotFoundError):
            # FileNotFoundError previously escaped and crashed the caller
            # when blender was missing entirely.
            return False
|
||||
@@ -1,215 +0,0 @@
|
||||
"""
|
||||
Plugin discovery and matching system
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Dict, List, Set, Optional
|
||||
import requests
|
||||
|
||||
from .registry import registry
|
||||
from .base import ServicePlugin
|
||||
from .exceptions import PluginNotFoundError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ServiceDiscovery:
    """Discovers and matches services to plugins.

    Periodically pulls the enabled-service list from the pool-hub and
    reconciles the global plugin ``registry`` against it (load newly
    enabled plugins, unload disabled ones, reload changed configs).
    """

    def __init__(self, pool_hub_url: str, miner_id: str):
        # Base URL of the pool-hub API and this miner's identity header.
        self.pool_hub_url = pool_hub_url
        self.miner_id = miner_id
        # Current reconciled state: enabled service ids and their configs.
        self.enabled_services: Set[str] = set()
        self.service_configs: Dict[str, Dict] = {}
        # Event-loop timestamp of the last successful sync.
        self._last_update = 0
        self._update_interval = 60  # seconds

    async def start(self) -> None:
        """Start the discovery service"""
        logger.info("Starting service discovery")

        # Initialize plugin registry
        await registry.initialize()

        # Initial sync
        await self.sync_services()

        # Start background sync task
        # NOTE(review): the task reference is not kept; per asyncio docs a
        # fire-and-forget task may be garbage-collected — confirm.
        asyncio.create_task(self._sync_loop())

    async def sync_services(self) -> None:
        """Sync enabled services from pool-hub.

        Any failure is logged and swallowed so the periodic loop survives.
        """
        try:
            # Get service configurations from pool-hub
            # NOTE(review): requests.get is a blocking call inside an async
            # method — it stalls the event loop for the duration of the HTTP
            # request. Consider asyncio.to_thread or an async HTTP client.
            response = requests.get(
                f"{self.pool_hub_url}/v1/services/",
                headers={"X-Miner-ID": self.miner_id}
            )
            response.raise_for_status()

            services = response.json()

            # Update local state
            new_enabled = set()
            new_configs = {}

            for service in services:
                if service.get("enabled", False):
                    service_id = service["service_type"]
                    new_enabled.add(service_id)
                    new_configs[service_id] = service

            # Find changes
            added = new_enabled - self.enabled_services
            removed = self.enabled_services - new_enabled
            updated = set()

            # A service present on both sides counts as updated when its
            # config dict changed.
            for service_id in self.enabled_services & new_enabled:
                if new_configs[service_id] != self.service_configs.get(service_id):
                    updated.add(service_id)

            # Apply changes: disable first, then enable, then reload updates.
            for service_id in removed:
                await self._disable_service(service_id)

            for service_id in added:
                await self._enable_service(service_id, new_configs[service_id])

            for service_id in updated:
                await self._update_service(service_id, new_configs[service_id])

            # Update state
            self.enabled_services = new_enabled
            self.service_configs = new_configs
            self._last_update = asyncio.get_event_loop().time()

            logger.info(f"Synced services: {len(self.enabled_services)} enabled")

        except Exception as e:
            # Best-effort by design: keep the previous state on any failure.
            logger.error(f"Failed to sync services: {e}")

    async def _enable_service(self, service_id: str, config: Dict) -> None:
        """Enable a service: load its plugin and apply its configuration."""
        try:
            # Check if plugin exists
            if service_id not in registry.list_plugins():
                logger.warning(f"No plugin available for service: {service_id}")
                return

            # Load plugin
            plugin = registry.load_plugin(service_id)

            # Validate hardware requirements
            await self._validate_hardware_requirements(plugin, config)

            # Configure plugin if needed (configure() is an optional hook).
            if hasattr(plugin, 'configure'):
                await plugin.configure(config.get('config', {}))

            logger.info(f"Enabled service: {service_id}")

        except Exception as e:
            logger.error(f"Failed to enable service {service_id}: {e}")

    async def _disable_service(self, service_id: str) -> None:
        """Disable a service"""
        try:
            # Unload plugin to free resources
            registry.unload_plugin(service_id)
            logger.info(f"Disabled service: {service_id}")

        except Exception as e:
            logger.error(f"Failed to disable service {service_id}: {e}")

    async def _update_service(self, service_id: str, config: Dict) -> None:
        """Update service configuration"""
        # For now, just disable and re-enable
        await self._disable_service(service_id)
        await self._enable_service(service_id, config)

    async def _validate_hardware_requirements(self, plugin: ServicePlugin, config: Dict) -> None:
        """Validate that miner meets plugin requirements.

        Currently informational only — requirements are logged, not enforced.
        """
        requirements = plugin.get_hardware_requirements()

        # This would check against actual miner hardware
        # For now, just log the requirements
        logger.debug(f"Hardware requirements for {plugin.service_id}: {requirements}")

    async def _sync_loop(self) -> None:
        """Background sync loop.

        Runs forever; sync_services() catches its own errors, so the loop
        only ends when the task is cancelled.
        """
        while True:
            await asyncio.sleep(self._update_interval)
            await self.sync_services()

    async def execute_service(self, service_id: str, request: Dict) -> Dict:
        """Execute a service request.

        Returns a plain dict (never raises); failures are reported as
        ``{"success": False, "error": ...}``.
        """
        try:
            # Check if service is enabled
            if service_id not in self.enabled_services:
                raise PluginNotFoundError(f"Service {service_id} is not enabled")

            # Get plugin
            plugin = registry.get_plugin(service_id)
            if not plugin:
                raise PluginNotFoundError(f"No plugin loaded for service: {service_id}")

            # Execute request
            result = await plugin.execute(request)

            # Convert result to dict
            return {
                "success": result.success,
                "data": result.data,
                "error": result.error,
                "metrics": result.metrics,
                "execution_time": result.execution_time
            }

        except Exception as e:
            logger.error(f"Failed to execute service {service_id}: {e}")
            return {
                "success": False,
                "error": str(e)
            }

    def get_enabled_services(self) -> List[str]:
        """Get list of enabled services"""
        return list(self.enabled_services)

    def get_service_status(self) -> Dict[str, Dict]:
        """Get status of all services known to the plugin registry."""
        status = {}

        for service_id in registry.list_plugins():
            plugin = registry.get_plugin(service_id)
            status[service_id] = {
                "enabled": service_id in self.enabled_services,
                "loaded": plugin is not None,
                "config": self.service_configs.get(service_id, {}),
                "capabilities": plugin.capabilities if plugin else []
            }

        return status

    async def health_check(self) -> Dict[str, bool]:
        """Health check all enabled services; maps service id -> healthy."""
        results = {}

        for service_id in self.enabled_services:
            plugin = registry.get_plugin(service_id)
            if plugin:
                try:
                    results[service_id] = await plugin.health_check()
                except Exception as e:
                    # A crashing health check counts as unhealthy.
                    logger.error(f"Health check failed for {service_id}: {e}")
                    results[service_id] = False
            else:
                # Enabled but not loaded is unhealthy.
                results[service_id] = False

        return results

    async def stop(self) -> None:
        """Stop the discovery service.

        NOTE(review): the _sync_loop task is not cancelled here and will
        keep ticking after cleanup — confirm whether that is intended.
        """
        logger.info("Stopping service discovery")
        registry.cleanup_all()
|
||||
@@ -1,23 +0,0 @@
|
||||
"""
|
||||
Plugin system exceptions
|
||||
"""
|
||||
|
||||
|
||||
class PluginError(Exception):
    """Root of the plugin exception hierarchy; catch this for any plugin failure."""


class PluginNotFoundError(PluginError):
    """A requested plugin/service does not exist or is not loaded."""


class PluginValidationError(PluginError):
    """A request failed a plugin's parameter validation."""


class PluginExecutionError(PluginError):
    """A plugin failed while running (missing binary, subprocess error, ...)."""
|
||||
@@ -1,318 +0,0 @@
|
||||
"""
|
||||
FFmpeg video processing plugin
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import os
|
||||
import subprocess
|
||||
import tempfile
|
||||
from typing import Any, Dict, List, Optional
|
||||
import time
|
||||
|
||||
from .base import ServicePlugin, PluginResult
|
||||
from .exceptions import PluginExecutionError
|
||||
|
||||
|
||||
class FFmpegPlugin(ServicePlugin):
|
||||
"""Plugin for FFmpeg video processing"""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.service_id = "ffmpeg"
|
||||
self.name = "FFmpeg Video Processing"
|
||||
self.version = "1.0.0"
|
||||
self.description = "Transcode and process video files using FFmpeg"
|
||||
self.capabilities = ["transcode", "resize", "compress", "convert"]
|
||||
|
||||
def setup(self) -> None:
|
||||
"""Initialize FFmpeg dependencies"""
|
||||
# Check for ffmpeg installation
|
||||
try:
|
||||
subprocess.run(["ffmpeg", "-version"], capture_output=True, check=True)
|
||||
self.ffmpeg_path = "ffmpeg"
|
||||
except (subprocess.CalledProcessError, FileNotFoundError):
|
||||
raise PluginExecutionError("FFmpeg not found. Install FFmpeg for video processing")
|
||||
|
||||
# Check for NVIDIA GPU support
|
||||
try:
|
||||
result = subprocess.run(
|
||||
["ffmpeg", "-hide_banner", "-encoders"],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=True
|
||||
)
|
||||
self.gpu_acceleration = "h264_nvenc" in result.stdout
|
||||
except subprocess.CalledProcessError:
|
||||
self.gpu_acceleration = False
|
||||
|
||||
def validate_request(self, request: Dict[str, Any]) -> List[str]:
|
||||
"""Validate FFmpeg request parameters"""
|
||||
errors = []
|
||||
|
||||
# Check required parameters
|
||||
if "input_url" not in request and "input_file" not in request:
|
||||
errors.append("Either 'input_url' or 'input_file' must be provided")
|
||||
|
||||
# Validate output format
|
||||
output_format = request.get("output_format", "mp4")
|
||||
valid_formats = ["mp4", "avi", "mov", "mkv", "webm", "flv"]
|
||||
if output_format not in valid_formats:
|
||||
errors.append(f"Invalid output format. Must be one of: {', '.join(valid_formats)}")
|
||||
|
||||
# Validate codec
|
||||
codec = request.get("codec", "h264")
|
||||
valid_codecs = ["h264", "h265", "vp9", "av1", "mpeg4"]
|
||||
if codec not in valid_codecs:
|
||||
errors.append(f"Invalid codec. Must be one of: {', '.join(valid_codecs)}")
|
||||
|
||||
# Validate resolution
|
||||
resolution = request.get("resolution")
|
||||
if resolution:
|
||||
valid_resolutions = ["720p", "1080p", "1440p", "4K", "8K"]
|
||||
if resolution not in valid_resolutions:
|
||||
errors.append(f"Invalid resolution. Must be one of: {', '.join(valid_resolutions)}")
|
||||
|
||||
# Validate bitrate
|
||||
bitrate = request.get("bitrate")
|
||||
if bitrate:
|
||||
if not isinstance(bitrate, str) or not bitrate.endswith(("k", "M")):
|
||||
errors.append("Bitrate must end with 'k' or 'M' (e.g., '1000k', '5M')")
|
||||
|
||||
# Validate frame rate
|
||||
fps = request.get("fps")
|
||||
if fps:
|
||||
if not isinstance(fps, (int, float)) or fps < 1 or fps > 120:
|
||||
errors.append("FPS must be between 1 and 120")
|
||||
|
||||
return errors
|
||||
|
||||
def get_hardware_requirements(self) -> Dict[str, Any]:
|
||||
"""Get hardware requirements for FFmpeg"""
|
||||
return {
|
||||
"gpu": "optional",
|
||||
"vram_gb": 2,
|
||||
"ram_gb": 8,
|
||||
"storage_gb": 10
|
||||
}
|
||||
|
||||
async def execute(self, request: Dict[str, Any]) -> PluginResult:
|
||||
"""Execute FFmpeg processing"""
|
||||
start_time = time.time()
|
||||
|
||||
try:
|
||||
# Validate request
|
||||
errors = self.validate_request(request)
|
||||
if errors:
|
||||
return PluginResult(
|
||||
success=False,
|
||||
error=f"Validation failed: {'; '.join(errors)}"
|
||||
)
|
||||
|
||||
# Get parameters
|
||||
input_source = request.get("input_url") or request.get("input_file")
|
||||
output_format = request.get("output_format", "mp4")
|
||||
codec = request.get("codec", "h264")
|
||||
resolution = request.get("resolution")
|
||||
bitrate = request.get("bitrate")
|
||||
fps = request.get("fps")
|
||||
gpu_acceleration = request.get("gpu_acceleration", self.gpu_acceleration)
|
||||
|
||||
# Get input file
|
||||
input_file = await self._get_input_file(input_source)
|
||||
|
||||
# Build FFmpeg command
|
||||
cmd = self._build_ffmpeg_command(
|
||||
input_file=input_file,
|
||||
output_format=output_format,
|
||||
codec=codec,
|
||||
resolution=resolution,
|
||||
bitrate=bitrate,
|
||||
fps=fps,
|
||||
gpu_acceleration=gpu_acceleration
|
||||
)
|
||||
|
||||
# Execute FFmpeg
|
||||
output_file = await self._execute_ffmpeg(cmd)
|
||||
|
||||
# Get output file info
|
||||
output_info = await self._get_video_info(output_file)
|
||||
|
||||
# Clean up input file if downloaded
|
||||
if input_source != request.get("input_file"):
|
||||
os.unlink(input_file)
|
||||
|
||||
execution_time = time.time() - start_time
|
||||
|
||||
return PluginResult(
|
||||
success=True,
|
||||
data={
|
||||
"output_file": output_file,
|
||||
"output_info": output_info,
|
||||
"parameters": {
|
||||
"codec": codec,
|
||||
"resolution": resolution,
|
||||
"bitrate": bitrate,
|
||||
"fps": fps,
|
||||
"gpu_acceleration": gpu_acceleration
|
||||
}
|
||||
},
|
||||
metrics={
|
||||
"input_size": os.path.getsize(input_file),
|
||||
"output_size": os.path.getsize(output_file),
|
||||
"compression_ratio": os.path.getsize(output_file) / os.path.getsize(input_file),
|
||||
"processing_time": execution_time,
|
||||
"real_time_factor": output_info.get("duration", 0) / execution_time if execution_time > 0 else 0
|
||||
},
|
||||
execution_time=execution_time
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
return PluginResult(
|
||||
success=False,
|
||||
error=str(e),
|
||||
execution_time=time.time() - start_time
|
||||
)
|
||||
|
||||
async def _get_input_file(self, source: str) -> str:
|
||||
"""Get input file from URL or path"""
|
||||
if source.startswith(("http://", "https://")):
|
||||
# Download from URL
|
||||
import requests
|
||||
|
||||
response = requests.get(source, stream=True)
|
||||
response.raise_for_status()
|
||||
|
||||
# Save to temporary file
|
||||
with tempfile.NamedTemporaryFile(delete=False, suffix=".mp4") as f:
|
||||
for chunk in response.iter_content(chunk_size=8192):
|
||||
f.write(chunk)
|
||||
return f.name
|
||||
else:
|
||||
# Local file
|
||||
if not os.path.exists(source):
|
||||
raise PluginExecutionError(f"Input file not found: {source}")
|
||||
return source
|
||||
|
||||
def _build_ffmpeg_command(
|
||||
self,
|
||||
input_file: str,
|
||||
output_format: str,
|
||||
codec: str,
|
||||
resolution: Optional[str],
|
||||
bitrate: Optional[str],
|
||||
fps: Optional[float],
|
||||
gpu_acceleration: bool
|
||||
) -> List[str]:
|
||||
"""Build FFmpeg command"""
|
||||
cmd = [self.ffmpeg_path, "-i", input_file]
|
||||
|
||||
# Add codec
|
||||
if gpu_acceleration and codec == "h264":
|
||||
cmd.extend(["-c:v", "h264_nvenc"])
|
||||
cmd.extend(["-preset", "fast"])
|
||||
elif gpu_acceleration and codec == "h265":
|
||||
cmd.extend(["-c:v", "hevc_nvenc"])
|
||||
cmd.extend(["-preset", "fast"])
|
||||
else:
|
||||
cmd.extend(["-c:v", codec])
|
||||
|
||||
# Add resolution
|
||||
if resolution:
|
||||
resolution_map = {
|
||||
"720p": ("1280", "720"),
|
||||
"1080p": ("1920", "1080"),
|
||||
"1440p": ("2560", "1440"),
|
||||
"4K": ("3840", "2160"),
|
||||
"8K": ("7680", "4320")
|
||||
}
|
||||
width, height = resolution_map.get(resolution, (None, None))
|
||||
if width and height:
|
||||
cmd.extend(["-s", f"{width}x{height}"])
|
||||
|
||||
# Add bitrate
|
||||
if bitrate:
|
||||
cmd.extend(["-b:v", bitrate])
|
||||
cmd.extend(["-b:a", "128k"]) # Audio bitrate
|
||||
|
||||
# Add FPS
|
||||
if fps:
|
||||
cmd.extend(["-r", str(fps)])
|
||||
|
||||
# Add audio codec
|
||||
cmd.extend(["-c:a", "aac"])
|
||||
|
||||
# Output file
|
||||
output_file = tempfile.mktemp(suffix=f".{output_format}")
|
||||
cmd.append(output_file)
|
||||
|
||||
return cmd
|
||||
|
||||
async def _execute_ffmpeg(self, cmd: List[str]) -> str:
|
||||
"""Execute FFmpeg command"""
|
||||
process = await asyncio.create_subprocess_exec(
|
||||
*cmd,
|
||||
stdout=asyncio.subprocess.PIPE,
|
||||
stderr=asyncio.subprocess.PIPE
|
||||
)
|
||||
|
||||
stdout, stderr = await process.communicate()
|
||||
|
||||
if process.returncode != 0:
|
||||
error_msg = stderr.decode() if stderr else "FFmpeg failed"
|
||||
raise PluginExecutionError(f"FFmpeg error: {error_msg}")
|
||||
|
||||
# Output file is the last argument
|
||||
return cmd[-1]
|
||||
|
||||
async def _get_video_info(self, video_file: str) -> Dict[str, Any]:
|
||||
"""Get video file information"""
|
||||
cmd = [
|
||||
"ffprobe",
|
||||
"-v", "quiet",
|
||||
"-print_format", "json",
|
||||
"-show_format",
|
||||
"-show_streams",
|
||||
video_file
|
||||
]
|
||||
|
||||
process = await asyncio.create_subprocess_exec(
|
||||
*cmd,
|
||||
stdout=asyncio.subprocess.PIPE,
|
||||
stderr=asyncio.subprocess.PIPE
|
||||
)
|
||||
|
||||
stdout, stderr = await process.communicate()
|
||||
|
||||
if process.returncode != 0:
|
||||
return {}
|
||||
|
||||
import json
|
||||
probe_data = json.loads(stdout.decode())
|
||||
|
||||
# Extract relevant info
|
||||
video_stream = next(
|
||||
(s for s in probe_data.get("streams", []) if s.get("codec_type") == "video"),
|
||||
{}
|
||||
)
|
||||
|
||||
return {
|
||||
"duration": float(probe_data.get("format", {}).get("duration", 0)),
|
||||
"size": int(probe_data.get("format", {}).get("size", 0)),
|
||||
"width": video_stream.get("width"),
|
||||
"height": video_stream.get("height"),
|
||||
"fps": eval(video_stream.get("r_frame_rate", "0/1")),
|
||||
"codec": video_stream.get("codec_name"),
|
||||
"bitrate": int(probe_data.get("format", {}).get("bit_rate", 0))
|
||||
}
|
||||
|
||||
async def health_check(self) -> bool:
|
||||
"""Check FFmpeg health"""
|
||||
try:
|
||||
result = subprocess.run(
|
||||
["ffmpeg", "-version"],
|
||||
capture_output=True,
|
||||
check=True
|
||||
)
|
||||
return True
|
||||
except subprocess.CalledProcessError:
|
||||
return False
|
||||
@@ -1,321 +0,0 @@
|
||||
"""
|
||||
LLM inference plugin
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
from typing import Dict, Any, List, Optional
|
||||
import time
|
||||
|
||||
from .base import GPUPlugin, PluginResult
|
||||
from .exceptions import PluginExecutionError
|
||||
|
||||
|
||||
class LLMPlugin(GPUPlugin):
|
||||
"""Plugin for Large Language Model inference"""
|
||||
|
||||
    def __init__(self):
        """Declare plugin identity and metadata; heavy setup happens in setup()."""
        super().__init__()
        self.service_id = "llm_inference"
        self.name = "LLM Inference"
        self.version = "1.0.0"
        self.description = "Run inference on large language models"
        # Operations this plugin advertises to callers.
        self.capabilities = ["generate", "stream", "chat"]
        # model_name -> (model, tokenizer); populated lazily by _load_model().
        self._model_cache = {}
|
||||
|
||||
    def setup(self) -> None:
        """Verify transformers and torch are importable and stash them on self.

        Raises:
            PluginExecutionError: when either dependency is missing, with
                an install hint in the message.
        """
        super().setup()

        # Check for transformers installation.  NOTE: despite the name,
        # self.transformers holds the AutoModelForCausalLM class.
        try:
            from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
            self.transformers = AutoModelForCausalLM
            self.AutoTokenizer = AutoTokenizer
            self.pipeline = pipeline
        except ImportError:
            raise PluginExecutionError("Transformers not installed. Install with: pip install transformers accelerate")

        # Check for torch; kept on self so other methods avoid re-importing.
        try:
            import torch
            self.torch = torch
        except ImportError:
            raise PluginExecutionError("PyTorch not installed. Install with: pip install torch")
|
||||
|
||||
def validate_request(self, request: Dict[str, Any]) -> List[str]:
|
||||
"""Validate LLM request parameters"""
|
||||
errors = []
|
||||
|
||||
# Check required parameters
|
||||
if "prompt" not in request:
|
||||
errors.append("'prompt' is required")
|
||||
|
||||
# Validate model
|
||||
model = request.get("model", "llama-7b")
|
||||
valid_models = [
|
||||
"llama-7b",
|
||||
"llama-13b",
|
||||
"mistral-7b",
|
||||
"mixtral-8x7b",
|
||||
"gpt-3.5-turbo",
|
||||
"gpt-4"
|
||||
]
|
||||
if model not in valid_models:
|
||||
errors.append(f"Invalid model. Must be one of: {', '.join(valid_models)}")
|
||||
|
||||
# Validate max_tokens
|
||||
max_tokens = request.get("max_tokens", 256)
|
||||
if not isinstance(max_tokens, int) or max_tokens < 1 or max_tokens > 4096:
|
||||
errors.append("max_tokens must be an integer between 1 and 4096")
|
||||
|
||||
# Validate temperature
|
||||
temperature = request.get("temperature", 0.7)
|
||||
if not isinstance(temperature, (int, float)) or temperature < 0.0 or temperature > 2.0:
|
||||
errors.append("temperature must be between 0.0 and 2.0")
|
||||
|
||||
# Validate top_p
|
||||
top_p = request.get("top_p")
|
||||
if top_p is not None and (not isinstance(top_p, (int, float)) or top_p <= 0.0 or top_p > 1.0):
|
||||
errors.append("top_p must be between 0.0 and 1.0")
|
||||
|
||||
return errors
|
||||
|
||||
def get_hardware_requirements(self) -> Dict[str, Any]:
|
||||
"""Get hardware requirements for LLM inference"""
|
||||
return {
|
||||
"gpu": "recommended",
|
||||
"vram_gb": 8,
|
||||
"ram_gb": 16,
|
||||
"cuda": "recommended"
|
||||
}
|
||||
|
||||
async def execute(self, request: Dict[str, Any]) -> PluginResult:
|
||||
"""Execute LLM inference"""
|
||||
start_time = time.time()
|
||||
|
||||
try:
|
||||
# Validate request
|
||||
errors = self.validate_request(request)
|
||||
if errors:
|
||||
return PluginResult(
|
||||
success=False,
|
||||
error=f"Validation failed: {'; '.join(errors)}"
|
||||
)
|
||||
|
||||
# Get parameters
|
||||
prompt = request["prompt"]
|
||||
model_name = request.get("model", "llama-7b")
|
||||
max_tokens = request.get("max_tokens", 256)
|
||||
temperature = request.get("temperature", 0.7)
|
||||
top_p = request.get("top_p", 0.9)
|
||||
do_sample = request.get("do_sample", True)
|
||||
stream = request.get("stream", False)
|
||||
|
||||
# Load model and tokenizer
|
||||
model, tokenizer = await self._load_model(model_name)
|
||||
|
||||
# Generate response
|
||||
loop = asyncio.get_event_loop()
|
||||
|
||||
if stream:
|
||||
# Streaming generation
|
||||
generator = await loop.run_in_executor(
|
||||
None,
|
||||
lambda: self._generate_streaming(
|
||||
model, tokenizer, prompt, max_tokens, temperature, top_p, do_sample
|
||||
)
|
||||
)
|
||||
|
||||
# Collect all tokens
|
||||
full_response = ""
|
||||
tokens = []
|
||||
for token in generator:
|
||||
tokens.append(token)
|
||||
full_response += token
|
||||
|
||||
execution_time = time.time() - start_time
|
||||
|
||||
return PluginResult(
|
||||
success=True,
|
||||
data={
|
||||
"text": full_response,
|
||||
"tokens": tokens,
|
||||
"streamed": True
|
||||
},
|
||||
metrics={
|
||||
"model": model_name,
|
||||
"prompt_tokens": len(tokenizer.encode(prompt)),
|
||||
"generated_tokens": len(tokens),
|
||||
"tokens_per_second": len(tokens) / execution_time if execution_time > 0 else 0
|
||||
},
|
||||
execution_time=execution_time
|
||||
)
|
||||
else:
|
||||
# Regular generation
|
||||
response = await loop.run_in_executor(
|
||||
None,
|
||||
lambda: self._generate(
|
||||
model, tokenizer, prompt, max_tokens, temperature, top_p, do_sample
|
||||
)
|
||||
)
|
||||
|
||||
execution_time = time.time() - start_time
|
||||
|
||||
return PluginResult(
|
||||
success=True,
|
||||
data={
|
||||
"text": response,
|
||||
"streamed": False
|
||||
},
|
||||
metrics={
|
||||
"model": model_name,
|
||||
"prompt_tokens": len(tokenizer.encode(prompt)),
|
||||
"generated_tokens": len(tokenizer.encode(response)) - len(tokenizer.encode(prompt)),
|
||||
"tokens_per_second": (len(tokenizer.encode(response)) - len(tokenizer.encode(prompt))) / execution_time if execution_time > 0 else 0
|
||||
},
|
||||
execution_time=execution_time
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
return PluginResult(
|
||||
success=False,
|
||||
error=str(e),
|
||||
execution_time=time.time() - start_time
|
||||
)
|
||||
|
||||
    async def _load_model(self, model_name: str):
        """Load and cache the (model, tokenizer) pair for *model_name*.

        Loading runs in a worker thread to avoid blocking the event loop.
        Returns the cached tuple on subsequent calls.
        """
        if model_name not in self._model_cache:
            loop = asyncio.get_event_loop()

            # Map friendly model names to HuggingFace model IDs.
            # NOTE(review): the "openai-gpt*" entries are not real HF
            # causal-LM repos -- these models would need the OpenAI API,
            # and loading them here will fail; confirm intended behavior.
            model_map = {
                "llama-7b": "meta-llama/Llama-2-7b-chat-hf",
                "llama-13b": "meta-llama/Llama-2-13b-chat-hf",
                "mistral-7b": "mistralai/Mistral-7B-Instruct-v0.1",
                "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
                "gpt-3.5-turbo": "openai-gpt",  # Would need OpenAI API
                "gpt-4": "openai-gpt-4"  # Would need OpenAI API
            }

            # Unknown names pass through unchanged (allows raw HF IDs).
            hf_model = model_map.get(model_name, model_name)

            # Load tokenizer
            tokenizer = await loop.run_in_executor(
                None,
                lambda: self.AutoTokenizer.from_pretrained(hf_model)
            )

            # Load model: fp16 + device_map on GPU, fp32 on CPU.
            # 4-bit quantization kicks in on small-VRAM GPUs.
            # (self.vram_gb presumably comes from GPUPlugin -- TODO confirm.)
            device = "cuda" if self.torch.cuda.is_available() else "cpu"
            model = await loop.run_in_executor(
                None,
                lambda: self.transformers.from_pretrained(
                    hf_model,
                    torch_dtype=self.torch.float16 if device == "cuda" else self.torch.float32,
                    device_map="auto" if device == "cuda" else None,
                    load_in_4bit=True if device == "cuda" and self.vram_gb < 16 else False
                )
            )

            self._model_cache[model_name] = (model, tokenizer)

        return self._model_cache[model_name]
|
||||
|
||||
    def _generate(
        self,
        model,
        tokenizer,
        prompt: str,
        max_tokens: int,
        temperature: float,
        top_p: float,
        do_sample: bool
    ) -> str:
        """Generate a completion synchronously (no streaming).

        Returns only the newly generated text -- the prompt tokens are
        sliced off before decoding.  Intended to be called from an
        executor thread (see ``execute``).
        """
        inputs = tokenizer(prompt, return_tensors="pt")

        # Match the model's device; _load_model puts the model on CUDA
        # when available.
        if self.torch.cuda.is_available():
            inputs = {k: v.cuda() for k, v in inputs.items()}

        # Inference only -- disable autograd bookkeeping.
        with self.torch.no_grad():
            outputs = model.generate(
                **inputs,
                max_new_tokens=max_tokens,
                temperature=temperature,
                top_p=top_p,
                do_sample=do_sample,
                pad_token_id=tokenizer.eos_token_id
            )

        # Decode only the new tokens (generate() returns prompt + completion).
        new_tokens = outputs[0][inputs["input_ids"].shape[1]:]
        response = tokenizer.decode(new_tokens, skip_special_tokens=True)

        return response
|
||||
|
||||
def _generate_streaming(
|
||||
self,
|
||||
model,
|
||||
tokenizer,
|
||||
prompt: str,
|
||||
max_tokens: int,
|
||||
temperature: float,
|
||||
top_p: float,
|
||||
do_sample: bool
|
||||
):
|
||||
"""Generate text with streaming"""
|
||||
inputs = tokenizer(prompt, return_tensors="pt")
|
||||
|
||||
if self.torch.cuda.is_available():
|
||||
inputs = {k: v.cuda() for k, v in inputs.items()}
|
||||
|
||||
# Simple streaming implementation
|
||||
# In production, you'd use model.generate with streamer
|
||||
with self.torch.no_grad():
|
||||
for i in range(max_tokens):
|
||||
outputs = model.generate(
|
||||
**inputs,
|
||||
max_new_tokens=1,
|
||||
temperature=temperature,
|
||||
top_p=top_p,
|
||||
do_sample=do_sample,
|
||||
pad_token_id=tokenizer.eos_token_id
|
||||
)
|
||||
|
||||
new_token = outputs[0][-1:]
|
||||
text = tokenizer.decode(new_token, skip_special_tokens=True)
|
||||
|
||||
if text == tokenizer.eos_token:
|
||||
break
|
||||
|
||||
yield text
|
||||
|
||||
# Update inputs for next iteration
|
||||
inputs["input_ids"] = self.torch.cat([inputs["input_ids"], new_token], dim=1)
|
||||
if "attention_mask" in inputs:
|
||||
inputs["attention_mask"] = self.torch.cat([
|
||||
inputs["attention_mask"],
|
||||
self.torch.ones((1, 1), device=inputs["attention_mask"].device)
|
||||
], dim=1)
|
||||
|
||||
    async def health_check(self) -> bool:
        """Return True when a model can be loaded; never raises.

        NOTE(review): this loads a full 7B-parameter model (downloading
        weights on first run and using significant RAM/VRAM) -- a very
        heavyweight "health check".  The result is cached by
        _load_model, so repeat checks are cheap.
        """
        try:
            await self._load_model("mistral-7b")
            return True
        except Exception:
            return False
|
||||
|
||||
def cleanup(self) -> None:
|
||||
"""Cleanup resources"""
|
||||
# Move models to CPU and clear cache
|
||||
for model, _ in self._model_cache.values():
|
||||
if hasattr(model, 'to'):
|
||||
model.to("cpu")
|
||||
self._model_cache.clear()
|
||||
|
||||
# Clear GPU cache
|
||||
if self.torch.cuda.is_available():
|
||||
self.torch.cuda.empty_cache()
|
||||
@@ -1,138 +0,0 @@
|
||||
"""
|
||||
Plugin registry for managing service plugins
|
||||
"""
|
||||
|
||||
from typing import Dict, List, Type, Optional
|
||||
import importlib
|
||||
import inspect
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
from .base import ServicePlugin
|
||||
from .exceptions import PluginError, PluginNotFoundError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class PluginRegistry:
    """Registry that tracks plugin classes and their live instances.

    Plugin *classes* are registered first (``register``); instances are
    created lazily by ``load_plugin`` and cached until unloaded.
    """

    def __init__(self):
        # service_id -> live plugin instance
        self._plugins: Dict[str, ServicePlugin] = {}
        # service_id -> registered (not yet instantiated) class
        self._plugin_classes: Dict[str, Type[ServicePlugin]] = {}
        # Guards initialize() against running twice.
        self._loaded = False

    def register(self, plugin_class: Type[ServicePlugin]) -> None:
        """Register a plugin class under its service id.

        NOTE(review): the built-in plugins assign ``service_id`` in
        ``__init__``, not as a class attribute, so this getattr may fall
        back to the class name -- confirm against ServicePlugin's
        definition.
        """
        plugin_id = getattr(plugin_class, "service_id", plugin_class.__name__)
        self._plugin_classes[plugin_id] = plugin_class
        logger.info("Registered plugin class: %s", plugin_id)

    def load_plugin(self, service_id: str) -> ServicePlugin:
        """Instantiate, set up, and cache the plugin for *service_id*.

        Raises:
            PluginNotFoundError: if no class is registered for the id.
            PluginError: if instantiation or setup() fails.
        """
        if service_id not in self._plugin_classes:
            raise PluginNotFoundError(f"Plugin {service_id} not found")

        # Already instantiated -> reuse.
        if service_id in self._plugins:
            return self._plugins[service_id]

        try:
            plugin_class = self._plugin_classes[service_id]
            plugin = plugin_class()
            plugin.setup()
            self._plugins[service_id] = plugin
            logger.info("Loaded plugin: %s", service_id)
            return plugin
        except Exception as e:
            logger.error("Failed to load plugin %s: %s", service_id, e)
            # Chain the cause so the original traceback is preserved.
            raise PluginError(f"Failed to load plugin {service_id}: {e}") from e

    def get_plugin(self, service_id: str) -> Optional[ServicePlugin]:
        """Return the loaded plugin instance, or None when not loaded."""
        return self._plugins.get(service_id)

    def unload_plugin(self, service_id: str) -> None:
        """Run cleanup() on and drop a loaded plugin (no-op when absent)."""
        if service_id in self._plugins:
            plugin = self._plugins[service_id]
            plugin.cleanup()
            del self._plugins[service_id]
            logger.info("Unloaded plugin: %s", service_id)

    def list_plugins(self) -> List[str]:
        """List all registered plugin IDs."""
        return list(self._plugin_classes.keys())

    def list_loaded_plugins(self) -> List[str]:
        """List all loaded (instantiated) plugin IDs."""
        return list(self._plugins.keys())

    async def load_all_from_directory(self, plugin_dir: Path) -> None:
        """Import every non-underscore ``*.py`` in *plugin_dir* and register
        any ServicePlugin subclasses found inside.

        One bad plugin file is logged and skipped; it does not stop the
        rest from loading.
        """
        # BUGFIX: ``import importlib`` at module level does not guarantee
        # the ``importlib.util`` submodule is bound; import it explicitly
        # before use.
        import importlib.util

        if not plugin_dir.exists():
            logger.warning("Plugin directory does not exist: %s", plugin_dir)
            return

        for plugin_file in plugin_dir.glob("*.py"):
            if plugin_file.name.startswith("_"):
                continue

            module_name = plugin_file.stem
            try:
                # Import the module directly from its file path.
                spec = importlib.util.spec_from_file_location(module_name, plugin_file)
                module = importlib.util.module_from_spec(spec)
                spec.loader.exec_module(module)

                # Find concrete ServicePlugin subclasses in the module.
                for name, obj in inspect.getmembers(module, inspect.isclass):
                    if (issubclass(obj, ServicePlugin) and
                            obj != ServicePlugin and
                            not name.startswith("_")):
                        self.register(obj)
                        logger.info("Auto-registered plugin from %s: %s", module_name, name)

            except Exception as e:
                logger.error("Failed to load plugin from %s: %s", plugin_file, e)

    async def initialize(self, plugin_dir: Optional[Path] = None) -> None:
        """Register the built-in plugins and optionally scan *plugin_dir*.

        Idempotent: subsequent calls return immediately.
        """
        if self._loaded:
            return

        # Load built-in plugins (imported lazily to avoid import cycles).
        from . import whisper, stable_diffusion, llm_inference, ffmpeg, blender

        self.register(whisper.WhisperPlugin)
        self.register(stable_diffusion.StableDiffusionPlugin)
        self.register(llm_inference.LLMPlugin)
        self.register(ffmpeg.FFmpegPlugin)
        self.register(blender.BlenderPlugin)

        # Load external plugins if a directory was provided.
        if plugin_dir:
            await self.load_all_from_directory(plugin_dir)

        self._loaded = True
        logger.info("Plugin registry initialized with %d plugins", len(self._plugin_classes))

    async def health_check_all(self) -> Dict[str, bool]:
        """Health-check every loaded plugin; a raising check reports False."""
        results = {}
        for service_id, plugin in self._plugins.items():
            try:
                results[service_id] = await plugin.health_check()
            except Exception as e:
                logger.error("Health check failed for %s: %s", service_id, e)
                results[service_id] = False
        return results

    def cleanup_all(self) -> None:
        """Unload (and clean up) every loaded plugin."""
        # Iterate over a snapshot: unload_plugin mutates the dict.
        for service_id in list(self._plugins.keys()):
            self.unload_plugin(service_id)
        logger.info("All plugins cleaned up")
|
||||
|
||||
|
||||
# Global registry instance: import `registry` from this package to share
# one plugin registry across the application (module-level singleton).
registry = PluginRegistry()
|
||||
@@ -1,281 +0,0 @@
|
||||
"""
|
||||
Stable Diffusion image generation plugin
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import base64
|
||||
import io
|
||||
from typing import Dict, Any, List
|
||||
import time
|
||||
import numpy as np
|
||||
|
||||
from .base import GPUPlugin, PluginResult
|
||||
from .exceptions import PluginExecutionError
|
||||
|
||||
|
||||
class StableDiffusionPlugin(GPUPlugin):
|
||||
"""Plugin for Stable Diffusion image generation"""
|
||||
|
||||
    def __init__(self):
        """Declare plugin identity and metadata; heavy setup happens in setup()."""
        super().__init__()
        self.service_id = "stable_diffusion"
        self.name = "Stable Diffusion"
        self.version = "1.0.0"
        self.description = "Generate images from text prompts using Stable Diffusion"
        # Supported generation modes.
        self.capabilities = ["txt2img", "img2img"]
        # model_name -> loaded pipeline; populated lazily by _load_model().
        self._model_cache = {}
|
||||
|
||||
    def setup(self) -> None:
        """Verify diffusers, torch, and Pillow are importable; stash them on self.

        Raises:
            PluginExecutionError: when any dependency is missing, with an
                install hint in the message.
        """
        super().setup()

        # Check for diffusers installation (txt2img and img2img pipelines).
        try:
            from diffusers import StableDiffusionPipeline, StableDiffusionImg2ImgPipeline
            self.diffusers = StableDiffusionPipeline
            self.img2img_pipe = StableDiffusionImg2ImgPipeline
        except ImportError:
            raise PluginExecutionError("Diffusers not installed. Install with: pip install diffusers transformers accelerate")

        # Check for torch; kept on self so other methods avoid re-importing.
        try:
            import torch
            self.torch = torch
        except ImportError:
            raise PluginExecutionError("PyTorch not installed. Install with: pip install torch")

        # Check for PIL (used to decode img2img source images).
        try:
            from PIL import Image
            self.Image = Image
        except ImportError:
            raise PluginExecutionError("PIL not installed. Install with: pip install Pillow")
|
||||
|
||||
def validate_request(self, request: Dict[str, Any]) -> List[str]:
|
||||
"""Validate Stable Diffusion request parameters"""
|
||||
errors = []
|
||||
|
||||
# Check required parameters
|
||||
if "prompt" not in request:
|
||||
errors.append("'prompt' is required")
|
||||
|
||||
# Validate model
|
||||
model = request.get("model", "runwayml/stable-diffusion-v1-5")
|
||||
valid_models = [
|
||||
"runwayml/stable-diffusion-v1-5",
|
||||
"stabilityai/stable-diffusion-2-1",
|
||||
"stabilityai/stable-diffusion-xl-base-1.0"
|
||||
]
|
||||
if model not in valid_models:
|
||||
errors.append(f"Invalid model. Must be one of: {', '.join(valid_models)}")
|
||||
|
||||
# Validate dimensions
|
||||
width = request.get("width", 512)
|
||||
height = request.get("height", 512)
|
||||
|
||||
if not isinstance(width, int) or width < 256 or width > 1024:
|
||||
errors.append("Width must be an integer between 256 and 1024")
|
||||
if not isinstance(height, int) or height < 256 or height > 1024:
|
||||
errors.append("Height must be an integer between 256 and 1024")
|
||||
|
||||
# Validate steps
|
||||
steps = request.get("steps", 20)
|
||||
if not isinstance(steps, int) or steps < 1 or steps > 100:
|
||||
errors.append("Steps must be an integer between 1 and 100")
|
||||
|
||||
# Validate guidance scale
|
||||
guidance_scale = request.get("guidance_scale", 7.5)
|
||||
if not isinstance(guidance_scale, (int, float)) or guidance_scale < 1.0 or guidance_scale > 20.0:
|
||||
errors.append("Guidance scale must be between 1.0 and 20.0")
|
||||
|
||||
# Check img2img requirements
|
||||
if request.get("task") == "img2img":
|
||||
if "init_image" not in request:
|
||||
errors.append("'init_image' is required for img2img task")
|
||||
strength = request.get("strength", 0.8)
|
||||
if not isinstance(strength, (int, float)) or strength < 0.0 or strength > 1.0:
|
||||
errors.append("Strength must be between 0.0 and 1.0")
|
||||
|
||||
return errors
|
||||
|
||||
def get_hardware_requirements(self) -> Dict[str, Any]:
|
||||
"""Get hardware requirements for Stable Diffusion"""
|
||||
return {
|
||||
"gpu": "required",
|
||||
"vram_gb": 6,
|
||||
"ram_gb": 8,
|
||||
"cuda": "required"
|
||||
}
|
||||
|
||||
    async def execute(self, request: Dict[str, Any]) -> PluginResult:
        """Generate images for *request* and wrap the outcome in a PluginResult.

        Handles both txt2img and img2img; images come back base64-encoded
        PNG.  All failures -- including validation -- are reported via
        ``PluginResult(success=False, ...)`` rather than raised.
        """
        start_time = time.time()

        try:
            # Validate request
            errors = self.validate_request(request)
            if errors:
                return PluginResult(
                    success=False,
                    error=f"Validation failed: {'; '.join(errors)}"
                )

            # Get parameters
            prompt = request["prompt"]
            negative_prompt = request.get("negative_prompt", "")
            model_name = request.get("model", "runwayml/stable-diffusion-v1-5")
            width = request.get("width", 512)
            height = request.get("height", 512)
            steps = request.get("steps", 20)
            guidance_scale = request.get("guidance_scale", 7.5)
            num_images = request.get("num_images", 1)
            seed = request.get("seed")
            task = request.get("task", "txt2img")

            # Load model (cached across calls).
            pipe = await self._load_model(model_name)

            # Generation runs in a worker thread so the event loop stays
            # responsive while the pipeline computes.
            loop = asyncio.get_event_loop()

            if task == "img2img":
                # img2img: decode the source image, then denoise from it
                # with the given strength (width/height come from the image).
                init_image_data = request["init_image"]
                init_image = self._decode_image(init_image_data)
                strength = request.get("strength", 0.8)

                images = await loop.run_in_executor(
                    None,
                    lambda: pipe(
                        prompt=prompt,
                        negative_prompt=negative_prompt,
                        image=init_image,
                        strength=strength,
                        num_inference_steps=steps,
                        guidance_scale=guidance_scale,
                        num_images_per_prompt=num_images,
                        generator=self._get_generator(seed)
                    ).images
                )
            else:
                # txt2img: generate from noise at the requested dimensions.
                images = await loop.run_in_executor(
                    None,
                    lambda: pipe(
                        prompt=prompt,
                        negative_prompt=negative_prompt,
                        width=width,
                        height=height,
                        num_inference_steps=steps,
                        guidance_scale=guidance_scale,
                        num_images_per_prompt=num_images,
                        generator=self._get_generator(seed)
                    ).images
                )

            # Encode images to base64 PNG for transport.
            encoded_images = []
            for img in images:
                buffer = io.BytesIO()
                img.save(buffer, format="PNG")
                encoded_images.append(base64.b64encode(buffer.getvalue()).decode())

            execution_time = time.time() - start_time

            return PluginResult(
                success=True,
                data={
                    "images": encoded_images,
                    "count": len(images),
                    "parameters": {
                        "prompt": prompt,
                        "width": width,
                        "height": height,
                        "steps": steps,
                        "guidance_scale": guidance_scale,
                        "seed": seed
                    }
                },
                metrics={
                    "model": model_name,
                    "task": task,
                    "images_generated": len(images),
                    "generation_time": execution_time,
                    "time_per_image": execution_time / len(images)
                },
                execution_time=execution_time
            )

        except Exception as e:
            # Boundary handler: convert any failure into a structured result.
            return PluginResult(
                success=False,
                error=str(e),
                execution_time=time.time() - start_time
            )
|
||||
|
||||
    async def _load_model(self, model_name: str):
        """Load and cache the diffusion pipeline for *model_name*.

        Loading runs in a worker thread to avoid blocking the event loop.
        NOTE: the safety checker is disabled (safety_checker=None), so no
        NSFW filtering is applied to outputs.
        """
        if model_name not in self._model_cache:
            loop = asyncio.get_event_loop()

            # Determine device: fp16 on CUDA, fp32 on CPU.
            device = "cuda" if self.torch.cuda.is_available() else "cpu"

            # Load the pipeline off the event loop.
            pipe = await loop.run_in_executor(
                None,
                lambda: self.diffusers.from_pretrained(
                    model_name,
                    torch_dtype=self.torch.float16 if device == "cuda" else self.torch.float32,
                    safety_checker=None,
                    requires_safety_checker=False
                )
            )

            pipe = pipe.to(device)

            # Enable memory optimizations: attention slicing always on
            # GPU, CPU offload on small-VRAM cards.
            # (self.vram_gb presumably comes from GPUPlugin -- TODO confirm.)
            if device == "cuda":
                pipe.enable_attention_slicing()
                if self.vram_gb < 8:
                    pipe.enable_model_cpu_offload()

            self._model_cache[model_name] = pipe

        return self._model_cache[model_name]
|
||||
|
||||
def _decode_image(self, image_data: str) -> 'Image':
|
||||
"""Decode base64 image"""
|
||||
if image_data.startswith('data:image'):
|
||||
# Remove data URL prefix
|
||||
image_data = image_data.split(',')[1]
|
||||
|
||||
image_bytes = base64.b64decode(image_data)
|
||||
return self.Image.open(io.BytesIO(image_bytes))
|
||||
|
||||
def _get_generator(self, seed: Optional[int]):
|
||||
"""Get torch generator for reproducible results"""
|
||||
if seed is not None:
|
||||
return self.torch.Generator().manual_seed(seed)
|
||||
return None
|
||||
|
||||
    async def health_check(self) -> bool:
        """Return True when the default pipeline can be loaded; never raises.

        NOTE(review): this loads the full SD 1.5 pipeline (downloading
        weights on first run) -- heavyweight for a health check, though
        the result is cached by _load_model.
        """
        try:
            pipe = await self._load_model("runwayml/stable-diffusion-v1-5")
            return pipe is not None
        except Exception:
            return False
|
||||
|
||||
def cleanup(self) -> None:
|
||||
"""Cleanup resources"""
|
||||
# Move models to CPU and clear cache
|
||||
for pipe in self._model_cache.values():
|
||||
if hasattr(pipe, 'to'):
|
||||
pipe.to("cpu")
|
||||
self._model_cache.clear()
|
||||
|
||||
# Clear GPU cache
|
||||
if self.torch.cuda.is_available():
|
||||
self.torch.cuda.empty_cache()
|
||||
@@ -1,215 +0,0 @@
|
||||
"""
|
||||
Whisper speech recognition plugin
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import os
|
||||
import tempfile
|
||||
from typing import Dict, Any, List
|
||||
import time
|
||||
|
||||
from .base import GPUPlugin, PluginResult
|
||||
from .exceptions import PluginExecutionError
|
||||
|
||||
|
||||
class WhisperPlugin(GPUPlugin):
|
||||
"""Plugin for Whisper speech recognition"""
|
||||
|
||||
    def __init__(self):
        """Declare plugin identity and metadata; heavy setup happens in setup()."""
        super().__init__()
        self.service_id = "whisper"
        self.name = "Whisper Speech Recognition"
        self.version = "1.0.0"
        self.description = "Transcribe and translate audio files using OpenAI Whisper"
        # Supported tasks.
        self.capabilities = ["transcribe", "translate"]
        # model_name -> loaded whisper model; populated lazily by _load_model().
        self._model_cache = {}
|
||||
|
||||
    def setup(self) -> None:
        """Verify whisper and the ffmpeg binary are available.

        Raises:
            PluginExecutionError: when either dependency is missing, with
                an install hint in the message.
        """
        super().setup()

        # Check for whisper installation; kept on self for later use.
        try:
            import whisper
            self.whisper = whisper
        except ImportError:
            raise PluginExecutionError("Whisper not installed. Install with: pip install openai-whisper")

        # Check for the ffmpeg binary (whisper shells out to it for decoding).
        import subprocess
        try:
            subprocess.run(["ffmpeg", "-version"], capture_output=True, check=True)
        except (subprocess.CalledProcessError, FileNotFoundError):
            raise PluginExecutionError("FFmpeg not found. Install FFmpeg for audio processing")
|
||||
|
||||
def validate_request(self, request: Dict[str, Any]) -> List[str]:
|
||||
"""Validate Whisper request parameters"""
|
||||
errors = []
|
||||
|
||||
# Check required parameters
|
||||
if "audio_url" not in request and "audio_file" not in request:
|
||||
errors.append("Either 'audio_url' or 'audio_file' must be provided")
|
||||
|
||||
# Validate model
|
||||
model = request.get("model", "base")
|
||||
valid_models = ["tiny", "base", "small", "medium", "large", "large-v2", "large-v3"]
|
||||
if model not in valid_models:
|
||||
errors.append(f"Invalid model. Must be one of: {', '.join(valid_models)}")
|
||||
|
||||
# Validate task
|
||||
task = request.get("task", "transcribe")
|
||||
if task not in ["transcribe", "translate"]:
|
||||
errors.append("Task must be 'transcribe' or 'translate'")
|
||||
|
||||
# Validate language
|
||||
if "language" in request:
|
||||
language = request["language"]
|
||||
if not isinstance(language, str) or len(language) != 2:
|
||||
errors.append("Language must be a 2-letter language code (e.g., 'en', 'es')")
|
||||
|
||||
return errors
|
||||
|
||||
def get_hardware_requirements(self) -> Dict[str, Any]:
|
||||
"""Get hardware requirements for Whisper"""
|
||||
return {
|
||||
"gpu": "recommended",
|
||||
"vram_gb": 2,
|
||||
"ram_gb": 4,
|
||||
"storage_gb": 1
|
||||
}
|
||||
|
||||
    async def execute(self, request: Dict[str, Any]) -> PluginResult:
        """Transcribe or translate the requested audio and wrap the outcome.

        All failures -- including validation -- are reported via
        ``PluginResult(success=False, ...)`` rather than raised.
        Downloaded temporary audio files are deleted afterwards.
        """
        start_time = time.time()

        try:
            # Validate request
            errors = self.validate_request(request)
            if errors:
                return PluginResult(
                    success=False,
                    error=f"Validation failed: {'; '.join(errors)}"
                )

            # Get parameters
            model_name = request.get("model", "base")
            task = request.get("task", "transcribe")
            language = request.get("language")
            temperature = request.get("temperature", 0.0)

            # Load or get cached model
            model = await self._load_model(model_name)

            # Resolve the audio source (may download to a temp file).
            audio_path = await self._get_audio_file(request)

            # Transcription runs in a worker thread so the event loop
            # stays responsive.
            loop = asyncio.get_event_loop()

            if task == "translate":
                # NOTE(review): the `language` parameter is not forwarded
                # on the translate branch -- confirm whether that is
                # intentional (whisper can use it as a source-language hint).
                result = await loop.run_in_executor(
                    None,
                    lambda: model.transcribe(
                        audio_path,
                        task="translate",
                        temperature=temperature
                    )
                )
            else:
                result = await loop.run_in_executor(
                    None,
                    lambda: model.transcribe(
                        audio_path,
                        language=language,
                        temperature=temperature
                    )
                )

            # Clean up: only delete files we downloaded, never a
            # caller-supplied local path.
            if audio_path != request.get("audio_file"):
                os.unlink(audio_path)

            execution_time = time.time() - start_time

            return PluginResult(
                success=True,
                data={
                    "text": result["text"],
                    "language": result.get("language"),
                    "segments": result.get("segments", [])
                },
                metrics={
                    "model": model_name,
                    "task": task,
                    "audio_duration": result.get("duration"),
                    "processing_time": execution_time,
                    "real_time_factor": result.get("duration", 0) / execution_time if execution_time > 0 else 0
                },
                execution_time=execution_time
            )

        except Exception as e:
            # Boundary handler: convert any failure into a structured result.
            return PluginResult(
                success=False,
                error=str(e),
                execution_time=time.time() - start_time
            )
|
||||
|
||||
async def _load_model(self, model_name: str):
    """Load Whisper model with caching.

    Loads *model_name* via ``self.whisper.load_model`` in a thread-pool
    executor (model loading is blocking) and memoizes it in
    ``self._model_cache`` so repeated requests reuse the same instance.

    NOTE(review): the check-then-insert on the cache is not atomic; two
    concurrent calls for the same uncached model may both load it. Last
    writer wins, so this wastes work but looks harmless — confirm.
    """
    if model_name not in self._model_cache:
        loop = asyncio.get_event_loop()
        # Run the blocking load off the event loop.
        model = await loop.run_in_executor(
            None,
            lambda: self.whisper.load_model(model_name)
        )
        self._model_cache[model_name] = model

    return self._model_cache[model_name]
|
||||
|
||||
async def _get_audio_file(self, request: Dict[str, Any]) -> str:
    """Return a local path to the request's audio.

    If the request carries an ``audio_file`` path it is returned as-is
    (and the caller must NOT delete it). Otherwise ``audio_url`` is
    downloaded to a temporary file whose path is returned; the caller
    owns and deletes that file.

    Fix: the original called ``requests.get`` directly inside this
    coroutine, blocking the event loop for the whole download. The
    download now runs in the default thread-pool executor, consistent
    with how this plugin already offloads model loading/transcription.
    """
    if "audio_file" in request:
        return request["audio_file"]

    audio_url = request["audio_url"]

    def _download() -> str:
        # Blocking network + file I/O; executed off the event loop.
        import requests

        response = requests.get(audio_url, stream=True)
        response.raise_for_status()

        suffix = self._get_audio_suffix(audio_url)
        with tempfile.NamedTemporaryFile(delete=False, suffix=suffix) as f:
            for chunk in response.iter_content(chunk_size=8192):
                f.write(chunk)
            return f.name

    loop = asyncio.get_event_loop()
    return await loop.run_in_executor(None, _download)
|
||||
|
||||
def _get_audio_suffix(self, url: str) -> str:
|
||||
"""Get file extension from URL"""
|
||||
if url.endswith('.mp3'):
|
||||
return '.mp3'
|
||||
elif url.endswith('.wav'):
|
||||
return '.wav'
|
||||
elif url.endswith('.m4a'):
|
||||
return '.m4a'
|
||||
elif url.endswith('.flac'):
|
||||
return '.flac'
|
||||
else:
|
||||
return '.mp3' # Default
|
||||
|
||||
async def health_check(self) -> bool:
    """Report plugin health by verifying the tiny Whisper model loads."""
    try:
        await self._load_model("tiny")
    except Exception:
        return False
    return True
|
||||
|
||||
def cleanup(self) -> None:
    """Cleanup resources: drop all cached Whisper models so they can be GC'd."""
    self._model_cache.clear()
|
||||
@@ -1,30 +0,0 @@
|
||||
[tool.poetry]
name = "aitbc-miner-node"
version = "0.1.0"
description = "AITBC miner node daemon"
authors = ["AITBC Team"]
# Package sources live under src/ (src-layout).
packages = [
    { include = "aitbc_miner", from = "src" }
]

[tool.poetry.dependencies]
python = "^3.11"
httpx = "^0.27.0"
pydantic = "^2.7.0"
pyyaml = "^6.0.1"
psutil = "^5.9.8"
aiosignal = "^1.3.1"
uvloop = { version = "^0.19.0", optional = true }
# NOTE(review): "asyncio" on PyPI is a stale Python 3.3 backport; the real
# asyncio ships with the standard library. Installing the PyPI package can
# shadow/confuse imports — confirm this dependency is intentional. It is
# also optional but not exposed via any extra below.
asyncio = { version = "^3.4.3", optional = true }
rich = "^13.7.1"

[tool.poetry.extras]
uvloop = ["uvloop"]

[tool.poetry.group.dev.dependencies]
pytest = "^8.2.0"
pytest-asyncio = "^0.23.0"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
|
||||
@@ -1 +0,0 @@
|
||||
"""AITBC miner node package."""
|
||||
@@ -1 +0,0 @@
|
||||
"""Control loop and background tasks for the miner node."""
|
||||
@@ -1,127 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
from collections.abc import Callable
|
||||
from typing import Optional
|
||||
|
||||
from ..config import settings
|
||||
from ..logging import get_logger
|
||||
from ..coordinator import CoordinatorClient
|
||||
from ..util.probe import collect_capabilities, collect_runtime_metrics
|
||||
from ..util.backoff import compute_backoff
|
||||
from ..util.fs import ensure_workspace, write_json
|
||||
from ..runners import get_runner
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class MinerControlLoop:
    """Background control loop for a miner node.

    Registers with the coordinator, then runs two concurrent tasks:
    a heartbeat loop and a job poll loop. ``stop()`` cancels both and
    closes the coordinator client.
    """

    def __init__(self) -> None:
        self._tasks: list[asyncio.Task[None]] = []
        self._stop_event = asyncio.Event()
        self._coordinator = CoordinatorClient()
        # Capabilities are probed once at construction and reused for registration.
        self._capabilities_snapshot = collect_capabilities(settings.max_concurrent_cpu, settings.max_concurrent_gpu)
        self._current_backoff = settings.poll_interval_seconds

    async def start(self) -> None:
        """Register, then launch the heartbeat and poll tasks."""
        logger.info("Starting miner control loop", extra={"node_id": settings.node_id})
        await self._register()
        self._tasks.append(asyncio.create_task(self._heartbeat_loop()))
        self._tasks.append(asyncio.create_task(self._poll_loop()))

    async def stop(self) -> None:
        """Cancel background tasks and release the coordinator client."""
        logger.info("Stopping miner control loop")
        self._stop_event.set()
        for task in self._tasks:
            task.cancel()
        # Swallow CancelledError from our own cancellations.
        await asyncio.gather(*self._tasks, return_exceptions=True)
        await self._coordinator.aclose()

    async def _register(self) -> None:
        """Register this node with the coordinator; re-raises on failure."""
        payload = {
            "capabilities": self._capabilities_snapshot.capabilities,
            "concurrency": self._capabilities_snapshot.concurrency,
            "region": settings.region,
        }
        try:
            resp = await self._coordinator.register(payload)
            logger.info("Registered miner", extra={"resp": resp})
        except Exception as exc:
            logger.exception("Failed to register miner", exc_info=exc)
            raise

    async def _heartbeat_loop(self) -> None:
        """Send a heartbeat every ``heartbeat_interval_seconds`` until stopped."""
        interval = settings.heartbeat_interval_seconds
        while not self._stop_event.is_set():
            payload = {
                # NOTE(review): inflight is hard-coded to 0; confirm whether the
                # coordinator expects the real in-flight job count here.
                "inflight": 0,
                "status": "ONLINE",
                "metadata": collect_runtime_metrics(),
            }
            try:
                await self._coordinator.heartbeat(payload)
                logger.debug("heartbeat sent")
            except Exception as exc:
                # Best-effort: a missed heartbeat should not kill the loop.
                logger.warning("heartbeat failed", exc_info=exc)
            await asyncio.sleep(interval)

    async def _poll_loop(self) -> None:
        """Poll for jobs, backing off exponentially while idle or on errors."""
        interval = settings.poll_interval_seconds
        while not self._stop_event.is_set():
            payload = {"max_wait_seconds": interval}
            try:
                job = await self._coordinator.poll(payload)
                if job:
                    logger.info("received job", extra={"job_id": job.get("job_id")})
                    # Fix: the original only reset the (otherwise unused)
                    # self._current_backoff attribute, so the local poll
                    # interval stayed backed-off forever after any idle period.
                    # Reset the actual interval when work arrives.
                    interval = settings.poll_interval_seconds
                    self._current_backoff = settings.poll_interval_seconds
                    await self._handle_job(job)
                else:
                    interval = min(compute_backoff(interval, 2.0, settings.heartbeat_jitter_pct, settings.max_backoff_seconds), settings.max_backoff_seconds)
                    logger.debug("no job; next poll interval=%s", interval)
            except Exception as exc:
                logger.warning("poll failed", exc_info=exc)
                interval = min(compute_backoff(interval, 2.0, settings.heartbeat_jitter_pct, settings.max_backoff_seconds), settings.max_backoff_seconds)
            await asyncio.sleep(interval)

    async def _handle_job(self, job: dict) -> None:
        """Run one job with the appropriate runner and report the outcome."""
        job_id = job.get("job_id", "unknown")
        workspace = ensure_workspace(settings.workspace_root, job_id)
        runner_kind = job.get("runner", {}).get("kind", "noop")
        runner = get_runner(runner_kind)

        try:
            result = await runner.run(job, workspace)
        except Exception as exc:
            # Runner itself blew up (not a job-level failure): report and bail.
            logger.exception("runner crashed", extra={"job_id": job_id, "runner": runner_kind})
            await self._coordinator.submit_failure(
                job_id,
                {
                    "error_code": "RUNTIME_ERROR",
                    "error_message": str(exc),
                    "metrics": {},
                },
            )
            return

        if result.ok:
            # Persist a local copy of the result alongside the job workspace.
            write_json(workspace / "result.json", result.output)
            try:
                await self._coordinator.submit_result(
                    job_id,
                    {
                        "result": result.output,
                        "metrics": {"workspace": str(workspace)},
                    },
                )
            except Exception as exc:
                logger.warning("failed to submit result", extra={"job_id": job_id}, exc_info=exc)
        else:
            await self._coordinator.submit_failure(
                job_id,
                {
                    "error_code": result.output.get("error_code", "FAILED"),
                    "error_message": result.output.get("error_message", "Job failed"),
                    "metrics": result.output.get("metrics", {}),
                },
            )
|
||||
@@ -1,40 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
|
||||
|
||||
class MinerSettings(BaseSettings):
    """Configuration for the miner node, loaded from env vars / .env file."""

    model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8", case_sensitive=False)

    # Identity / coordinator connection.
    node_id: str = "node-dev-1"
    coordinator_base_url: str = "http://127.0.0.1:8011/v1"
    # NOTE(review): hard-coded default credential. Fine for local dev only;
    # confirm production deployments always override via environment.
    auth_token: str = "REDACTED_MINER_KEY"
    region: Optional[str] = None

    # Filesystem layout for job workspaces and caches.
    workspace_root: Path = Field(default=Path("/var/lib/aitbc/miner/jobs"))
    cache_root: Path = Field(default=Path("/var/lib/aitbc/miner/cache"))

    # Heartbeat cadence and jitter (percent).
    heartbeat_interval_seconds: int = 15
    heartbeat_jitter_pct: int = 10
    heartbeat_timeout_seconds: int = 60

    # Job polling cadence and exponential-backoff ceiling.
    poll_interval_seconds: int = 3
    max_backoff_seconds: int = 60

    # Advertised concurrency limits.
    max_concurrent_cpu: int = 1
    max_concurrent_gpu: int = 1

    # Feature toggles for the built-in runners.
    enable_cli_runner: bool = True
    enable_python_runner: bool = True

    # Directory of allowlist fragments for CLI commands.
    allowlist_dir: Path = Field(default=Path("/etc/aitbc/miner/allowlist.d"))

    log_level: str = "INFO"
    log_path: Optional[Path] = None


# Module-level singleton imported throughout the miner package.
settings = MinerSettings()
|
||||
@@ -1,76 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
import httpx
|
||||
|
||||
from .config import MinerSettings, settings
|
||||
from .logging import get_logger
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class CoordinatorClient:
    """Async HTTP client for interacting with the coordinator API."""

    def __init__(self, cfg: MinerSettings | None = None) -> None:
        self.cfg = cfg or settings
        # Underlying httpx client is created lazily on first use.
        self._client: Optional[httpx.AsyncClient] = None

    @property
    def client(self) -> httpx.AsyncClient:
        """Lazily-constructed shared AsyncClient with auth headers applied."""
        if self._client is None:
            headers = {
                "Authorization": f"Bearer {self.cfg.auth_token}",
                "User-Agent": f"aitbc-miner/{self.cfg.node_id}",
            }
            timeout = httpx.Timeout(connect=5.0, read=30.0, write=10.0, pool=None)
            self._client = httpx.AsyncClient(base_url=self.cfg.coordinator_base_url.rstrip("/"), headers=headers, timeout=timeout)
        return self._client

    async def aclose(self) -> None:
        """Close the underlying client (safe to call when never opened)."""
        if self._client:
            await self._client.aclose()
            self._client = None

    async def register(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """POST node capabilities to /miners/register; raises on HTTP error."""
        logger.debug("registering miner", extra={"payload": payload})
        resp = await self.client.post("/miners/register", json=payload)
        resp.raise_for_status()
        return resp.json()

    async def heartbeat(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """POST a liveness heartbeat; raises on HTTP error."""
        resp = await self.client.post("/miners/heartbeat", json=payload)
        resp.raise_for_status()
        return resp.json()

    async def poll(self, payload: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Ask for a job; returns None on 204 (no work), raises on HTTP error."""
        resp = await self.client.post("/miners/poll", json=payload)
        if resp.status_code == 204:
            logger.debug("no job available")
            return None
        resp.raise_for_status()
        return resp.json()

    async def submit_result(self, job_id: str, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Report a successful job outcome for *job_id*."""
        resp = await self.client.post(f"/miners/{job_id}/result", json=payload)
        resp.raise_for_status()
        return resp.json()

    async def submit_failure(self, job_id: str, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Report a failed job outcome for *job_id*."""
        resp = await self.client.post(f"/miners/{job_id}/fail", json=payload)
        resp.raise_for_status()
        return resp.json()

    async def __aenter__(self) -> "CoordinatorClient":
        # Touch the property so the client exists before the context body runs.
        _ = self.client
        return self

    async def __aexit__(self, exc_type, exc, tb) -> None:
        await self.aclose()
|
||||
|
||||
|
||||
async def backoff(base: float, max_seconds: float) -> float:
    """Sleep for *base* seconds, then return the doubled delay capped at *max_seconds*."""
    await asyncio.sleep(base)
    doubled = base * 2
    return doubled if doubled < max_seconds else max_seconds
|
||||
@@ -1,25 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from .config import settings
|
||||
|
||||
|
||||
def configure_logging(level: Optional[str] = None, log_path: Optional[str] = None) -> None:
    """Configure root logging for the miner.

    *level* falls back to ``settings.log_level``; an unknown name falls back
    to INFO. Always logs to stderr; additionally to *log_path* if given.
    """
    log_level = getattr(logging, (level or settings.log_level).upper(), logging.INFO)
    handlers: list[logging.Handler] = [logging.StreamHandler()]
    if log_path:
        handlers.append(logging.FileHandler(log_path))

    # NOTE: basicConfig is a no-op if the root logger already has handlers.
    logging.basicConfig(
        level=log_level,
        format="%(asctime)s %(levelname)s %(name)s :: %(message)s",
        handlers=handlers,
    )
|
||||
|
||||
|
||||
def get_logger(name: str) -> logging.Logger:
    """Return a named logger, configuring root logging on first use.

    Configuration is skipped if the root logger already has handlers
    (e.g. when the host application configured logging itself).
    """
    if not logging.getLogger().handlers:
        configure_logging(settings.log_level, settings.log_path.as_posix() if settings.log_path else None)
    return logging.getLogger(name)
|
||||
@@ -1,51 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import signal
|
||||
from contextlib import asynccontextmanager
|
||||
from typing import AsyncIterator
|
||||
|
||||
from .config import settings
|
||||
from .logging import get_logger
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class MinerApplication:
    """Minimal daemon shell: start() blocks until stop() sets the event."""

    def __init__(self) -> None:
        self._stop_event = asyncio.Event()

    async def start(self) -> None:
        """Run until stopped (no worker loops are wired up yet)."""
        logger.info("Miner node starting", extra={"node_id": settings.node_id})
        # TODO: initialize capability probe, register with coordinator, start heartbeat and poll loops
        await self._stop_event.wait()

    async def stop(self) -> None:
        """Signal start() to return; idempotent."""
        logger.info("Miner node shutting down")
        self._stop_event.set()
|
||||
|
||||
|
||||
@asynccontextmanager
async def miner_app() -> AsyncIterator[MinerApplication]:
    """Yield a MinerApplication, guaranteeing stop() runs on exit.

    stop() only sets an event, so calling it even when start() never ran
    (or already returned) is harmless.
    """
    app = MinerApplication()
    try:
        yield app
    finally:
        await app.stop()
|
||||
|
||||
|
||||
def run() -> None:
    """Synchronous entry point: run the miner until SIGINT/SIGTERM.

    Fix: the original created a loop with ``new_event_loop`` /
    ``set_event_loop`` and never closed it. ``asyncio.run`` owns the loop
    lifecycle (creation, shutdown of async generators, closing), and the
    signal handlers are installed on the running loop from inside the
    coroutine, which is equivalent to the original behavior.
    """

    async def _run() -> None:
        loop = asyncio.get_running_loop()
        async with miner_app() as app:
            # Translate POSIX termination signals into a graceful stop.
            loop.add_signal_handler(signal.SIGINT, lambda: asyncio.create_task(app.stop()))
            loop.add_signal_handler(signal.SIGTERM, lambda: asyncio.create_task(app.stop()))
            await app.start()

    asyncio.run(_run())
|
||||
|
||||
|
||||
if __name__ == "__main__": # pragma: no cover
|
||||
run()
|
||||
@@ -1,20 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Dict
|
||||
|
||||
from .base import BaseRunner
|
||||
from .cli.simple import CLIRunner
|
||||
from .python.noop import PythonNoopRunner
|
||||
from .service import ServiceRunner
|
||||
|
||||
|
||||
# Registry of shared runner instances keyed by the job's runner.kind value.
# NOTE(review): these are singletons reused across jobs — runners that keep
# per-job state must be safe to share; confirm for ServiceRunner.
_RUNNERS: Dict[str, BaseRunner] = {
    "cli": CLIRunner(),
    "python": PythonNoopRunner(),
    "noop": PythonNoopRunner(),
    "service": ServiceRunner(),
}


def get_runner(kind: str) -> BaseRunner:
    """Return the runner registered for *kind*, defaulting to the no-op runner."""
    return _RUNNERS.get(kind, _RUNNERS["noop"])
|
||||
@@ -1,17 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict
|
||||
|
||||
|
||||
@dataclass
class RunnerResult:
    """Outcome of a runner execution."""

    # True when the job succeeded; drives result vs failure submission.
    ok: bool
    # Payload forwarded to the coordinator (result on success; expected to
    # carry error_code/error_message/metrics on failure).
    output: Dict[str, Any]
    # Optional named files produced in the workspace (e.g. stdout/stderr logs).
    artifacts: Dict[str, Path] | None = None


class BaseRunner:
    """Interface for job runners; subclasses implement run()."""

    async def run(self, job: Dict[str, Any], workspace: Path) -> RunnerResult:
        """Execute *job* inside *workspace* and return a RunnerResult."""
        raise NotImplementedError
|
||||
@@ -1,62 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from ..base import BaseRunner, RunnerResult
|
||||
|
||||
|
||||
class CLIRunner(BaseRunner):
    """Executes an arbitrary command line inside the job workspace.

    The command comes from ``job["runner"]["command"]``; stdout and stderr
    are captured to ``stdout.log`` / ``stderr.log`` in the workspace.
    """

    async def run(self, job: Dict[str, Any], workspace: Path) -> RunnerResult:
        runner_cfg = job.get("runner", {})
        argv: List[str] = runner_cfg.get("command", [])
        # Guard clause: an empty command is a job configuration error.
        if not argv:
            return RunnerResult(
                ok=False,
                output={
                    "error_code": "INVALID_COMMAND",
                    "error_message": "runner.command is required for CLI jobs",
                    "metrics": {},
                },
            )

        out_log = workspace / "stdout.log"
        err_log = workspace / "stderr.log"

        proc = await asyncio.create_subprocess_exec(
            *argv,
            cwd=str(workspace),
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )
        out_bytes, err_bytes = await proc.communicate()

        # Persist captured streams next to the job for later inspection.
        out_log.write_bytes(out_bytes)
        err_log.write_bytes(err_bytes)

        if proc.returncode != 0:
            return RunnerResult(
                ok=False,
                output={
                    "error_code": "PROCESS_FAILED",
                    "error_message": f"command exited with code {proc.returncode}",
                    "metrics": {
                        "exit_code": proc.returncode,
                        "stderr": err_log.name,
                    },
                },
            )

        return RunnerResult(
            ok=True,
            output={
                "exit_code": 0,
                "stdout": out_log.name,
                "stderr": err_log.name,
            },
            artifacts={
                "stdout": out_log,
                "stderr": err_log,
            },
        )
|
||||
@@ -1,20 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict
|
||||
|
||||
from ..base import BaseRunner, RunnerResult
|
||||
|
||||
|
||||
class PythonNoopRunner(BaseRunner):
    """No-op runner: yields control once and echoes the job payload back."""

    async def run(self, job: Dict[str, Any], workspace: Path) -> RunnerResult:
        # Yield to the event loop without performing real work.
        await asyncio.sleep(0)
        echoed = job.get("payload", {})
        output = {
            "echo": echoed,
            "message": "python noop runner executed",
        }
        return RunnerResult(ok=True, output=output)
|
||||
@@ -1,118 +0,0 @@
|
||||
"""
|
||||
Service runner for executing GPU service jobs via plugins
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any, Optional
|
||||
|
||||
from .base import BaseRunner
|
||||
from ...config import settings
|
||||
from ...logging import get_logger
|
||||
|
||||
# Add plugins directory to path
|
||||
plugins_path = Path(__file__).parent.parent.parent.parent / "plugins"
|
||||
sys.path.insert(0, str(plugins_path))
|
||||
|
||||
try:
|
||||
from plugins.discovery import ServiceDiscovery
|
||||
except ImportError:
|
||||
ServiceDiscovery = None
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class ServiceRunner(BaseRunner):
|
||||
"""Runner for GPU service jobs using the plugin system"""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.discovery: Optional[ServiceDiscovery] = None
|
||||
self._initialized = False
|
||||
|
||||
async def initialize(self) -> None:
|
||||
"""Initialize the service discovery system"""
|
||||
if self._initialized:
|
||||
return
|
||||
|
||||
if ServiceDiscovery is None:
|
||||
raise ImportError("ServiceDiscovery not available. Check plugin installation.")
|
||||
|
||||
# Create service discovery
|
||||
pool_hub_url = getattr(settings, 'pool_hub_url', 'http://localhost:8001')
|
||||
miner_id = getattr(settings, 'node_id', 'miner-1')
|
||||
|
||||
self.discovery = ServiceDiscovery(pool_hub_url, miner_id)
|
||||
await self.discovery.start()
|
||||
self._initialized = True
|
||||
|
||||
logger.info("Service runner initialized")
|
||||
|
||||
async def run(self, job: Dict[str, Any], workspace: Path) -> Dict[str, Any]:
|
||||
"""Execute a service job"""
|
||||
await self.initialize()
|
||||
|
||||
job_id = job.get("job_id", "unknown")
|
||||
|
||||
try:
|
||||
# Extract service type and parameters
|
||||
service_type = job.get("service_type")
|
||||
if not service_type:
|
||||
raise ValueError("Job missing service_type")
|
||||
|
||||
# Get service parameters from job
|
||||
service_params = job.get("parameters", {})
|
||||
|
||||
logger.info(f"Executing service job", extra={
|
||||
"job_id": job_id,
|
||||
"service_type": service_type
|
||||
})
|
||||
|
||||
# Execute via plugin system
|
||||
result = await self.discovery.execute_service(service_type, service_params)
|
||||
|
||||
# Save result to workspace
|
||||
result_file = workspace / "result.json"
|
||||
with open(result_file, "w") as f:
|
||||
json.dump(result, f, indent=2)
|
||||
|
||||
if result["success"]:
|
||||
logger.info(f"Service job completed successfully", extra={
|
||||
"job_id": job_id,
|
||||
"execution_time": result.get("execution_time")
|
||||
})
|
||||
|
||||
# Return success result
|
||||
return {
|
||||
"status": "completed",
|
||||
"result": result["data"],
|
||||
"metrics": result.get("metrics", {}),
|
||||
"execution_time": result.get("execution_time")
|
||||
}
|
||||
else:
|
||||
logger.error(f"Service job failed", extra={
|
||||
"job_id": job_id,
|
||||
"error": result.get("error")
|
||||
})
|
||||
|
||||
# Return failure result
|
||||
return {
|
||||
"status": "failed",
|
||||
"error": result.get("error", "Unknown error"),
|
||||
"execution_time": result.get("execution_time")
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.exception("Service runner failed", extra={"job_id": job_id})
|
||||
return {
|
||||
"status": "failed",
|
||||
"error": str(e)
|
||||
}
|
||||
|
||||
async def cleanup(self) -> None:
|
||||
"""Cleanup resources"""
|
||||
if self.discovery:
|
||||
await self.discovery.stop()
|
||||
self._initialized = False
|
||||
@@ -1,19 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import random
|
||||
|
||||
|
||||
def compute_backoff(base: float, factor: float, jitter_pct: float, max_seconds: float) -> float:
    """Next backoff delay: base*factor capped at max_seconds, with +/- jitter_pct% jitter.

    The result is never negative.
    """
    capped = min(base * factor, max_seconds)
    spread = capped * (jitter_pct / 100.0)
    candidate = random.uniform(capped - spread, capped + spread)
    return candidate if candidate > 0.0 else 0.0
|
||||
|
||||
|
||||
def next_backoff(current: float, factor: float, jitter_pct: float, max_seconds: float) -> float:
    """Alias for compute_backoff with the current delay as the base."""
    return compute_backoff(current, factor, jitter_pct, max_seconds)
|
||||
|
||||
|
||||
async def sleep_with_backoff(delay: float, factor: float, jitter_pct: float, max_seconds: float) -> float:
    """Sleep for *delay* seconds, then return the next jittered delay to use."""
    await asyncio.sleep(delay)
    return next_backoff(delay, factor, jitter_pct, max_seconds)
|
||||
@@ -1,15 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def ensure_workspace(root: Path, job_id: str) -> Path:
    """Create (if needed) and return the per-job workspace directory under *root*."""
    workspace = root / job_id
    workspace.mkdir(parents=True, exist_ok=True)
    return workspace
|
||||
|
||||
|
||||
def write_json(path: Path, data: dict) -> None:
    """Serialize *data* as pretty-printed JSON to *path* (UTF-8)."""
    import json

    serialized = json.dumps(data, indent=2)
    path.write_text(serialized, encoding="utf-8")
|
||||
@@ -1,91 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import platform
|
||||
import shutil
|
||||
import subprocess
|
||||
import time
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Dict, List
|
||||
|
||||
import psutil
|
||||
|
||||
|
||||
@dataclass
class CapabilitySnapshot:
    """Static hardware/software description sent at miner registration."""

    # Free-form capability document (node, cpu, memory, runners, optional gpus).
    capabilities: Dict[str, Any]
    # Concurrency level this node advertises to the coordinator.
    concurrency: int
    region: str | None = None
|
||||
|
||||
|
||||
def collect_capabilities(max_cpu_concurrency: int, max_gpu_concurrency: int) -> CapabilitySnapshot:
    """Probe host CPU/memory/GPU and build a CapabilitySnapshot.

    GPU concurrency only counts toward the advertised concurrency when at
    least one GPU is detected; the result is always at least 1.
    """
    cpu_count = psutil.cpu_count(logical=True) or 1
    total_mem = psutil.virtual_memory().total
    gpu_info = _detect_gpus()

    capabilities: Dict[str, Any] = {
        "node": platform.node(),
        "python_version": platform.python_version(),
        "platform": platform.platform(),
        "cpu": {
            "logical_cores": cpu_count,
            # NOTE: platform.processor() may be empty on some systems.
            "model": platform.processor(),
        },
        "memory": {
            "total_bytes": total_mem,
            "total_gb": round(total_mem / (1024**3), 2),
        },
        "runners": {
            "cli": True,
            "python": True,
        },
    }

    # Only advertise GPUs when nvidia-smi reported at least one.
    if gpu_info:
        capabilities["gpus"] = gpu_info

    concurrency = max(1, max_cpu_concurrency, max_gpu_concurrency if gpu_info else 0)
    return CapabilitySnapshot(capabilities=capabilities, concurrency=concurrency)
|
||||
|
||||
|
||||
def collect_runtime_metrics() -> Dict[str, Any]:
    """Snapshot of current CPU/load/memory usage for heartbeat metadata."""
    vm = psutil.virtual_memory()
    # getloadavg is unavailable on some platforms (e.g. Windows) — default to zeros.
    load_avg = psutil.getloadavg() if hasattr(psutil, "getloadavg") else (0, 0, 0)
    return {
        # interval=None returns the utilization since the previous call (non-blocking).
        "cpu_percent": psutil.cpu_percent(interval=None),
        "load_avg": load_avg,
        "memory_percent": vm.percent,
        "timestamp": time.time(),
    }
|
||||
|
||||
|
||||
def _detect_gpus() -> List[Dict[str, Any]]:
    """Detect NVIDIA GPUs via nvidia-smi.

    Returns a list of ``{"name": ..., "memory_mb": ...}`` dicts, or an empty
    list when nvidia-smi is absent or fails. memory_mb is None when the
    reported value cannot be parsed.
    """
    nvidia_smi = shutil.which("nvidia-smi")
    if not nvidia_smi:
        return []
    try:
        output = subprocess.check_output(
            [
                nvidia_smi,
                "--query-gpu=name,memory.total",
                "--format=csv,noheader"
            ],
            stderr=subprocess.DEVNULL,
            text=True,
        )
    except (subprocess.CalledProcessError, FileNotFoundError):
        # Best-effort probe: any failure means "no GPUs".
        return []

    gpus: List[Dict[str, Any]] = []
    # Each CSV line looks like: "<name>, <memory> MiB"
    for line in output.strip().splitlines():
        parts = [p.strip() for p in line.split(",")]
        if not parts:
            continue
        name = parts[0]
        mem_mb = None
        # Parse the memory column only when it carries the expected "MiB" unit.
        if len(parts) > 1 and parts[1].lower().endswith(" mib"):
            try:
                mem_mb = int(float(parts[1].split()[0]))
            except ValueError:
                mem_mb = None
        gpus.append({"name": name, "memory_mb": mem_mb})
    return gpus
|
||||
@@ -1,37 +0,0 @@
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from aitbc_miner.runners.cli.simple import CLIRunner
|
||||
from aitbc_miner.runners.python.noop import PythonNoopRunner
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_python_noop_runner(tmp_path: Path):
    """The noop runner succeeds and echoes the job payload back."""
    runner = PythonNoopRunner()
    job = {"payload": {"value": 42}}
    result = await runner.run(job, tmp_path)
    assert result.ok
    assert result.output["echo"] == job["payload"]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_cli_runner_success(tmp_path: Path):
    """A successful command produces artifacts and captures stdout to a log file."""
    runner = CLIRunner()
    job = {"runner": {"command": ["echo", "hello"]}}
    result = await runner.run(job, tmp_path)
    assert result.ok
    assert result.artifacts is not None
    stdout_path = result.artifacts["stdout"]
    assert stdout_path.exists()
    assert stdout_path.read_text().strip() == "hello"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_cli_runner_invalid_command(tmp_path: Path):
    """A job without runner.command fails with INVALID_COMMAND."""
    runner = CLIRunner()
    job = {"runner": {}}
    result = await runner.run(job, tmp_path)
    assert not result.ok
    assert result.output["error_code"] == "INVALID_COMMAND"
|
||||
@@ -1,245 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
AITBC Wallet CLI - A command-line wallet for AITBC blockchain
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import sys
|
||||
import os
|
||||
from pathlib import Path
|
||||
import httpx
|
||||
from datetime import datetime
|
||||
|
||||
# Configuration
|
||||
BLOCKCHAIN_RPC = "http://127.0.0.1:9080"
|
||||
WALLET_DIR = Path.home() / ".aitbc" / "wallets"
|
||||
|
||||
def print_header():
    """Print the wallet CLI banner."""
    rule = "=" * 50
    print(rule)
    print(" AITBC Blockchain Wallet CLI")
    print(rule)
|
||||
|
||||
def check_blockchain_connection():
    """Check node liveness and RPC availability.

    Returns a 3-tuple ``(connected, info, extra)``:
      * ``(True, height, short_hash)`` when both /metrics and /rpc/head respond;
      * ``(False, error_message, "node_running")`` when the node is up but RPC fails;
      * ``(False, error_message, None)`` when the node itself is unreachable.
    """
    # First check if node is running by checking metrics
    try:
        response = httpx.get(f"{BLOCKCHAIN_RPC}/metrics", timeout=5.0)
        if response.status_code == 200:
            # Node is running, now try RPC
            try:
                rpc_response = httpx.get(f"{BLOCKCHAIN_RPC}/rpc/head", timeout=5.0)
                if rpc_response.status_code == 200:
                    data = rpc_response.json()
                    # Truncate the hash for display purposes.
                    return True, data.get("height", "unknown"), data.get("hash", "unknown")[:16] + "..."
                else:
                    return False, f"RPC endpoint error (HTTP {rpc_response.status_code})", "node_running"
            except Exception as e:
                return False, f"RPC error: {str(e)}", "node_running"
        return False, f"Node not responding (HTTP {response.status_code})", None
    except Exception as e:
        return False, str(e), None
|
||||
|
||||
def get_balance(address):
    """Fetch the balance for *address* from the node RPC.

    Returns the parsed JSON on success, or ``{"error": ...}`` on any
    HTTP error status or exception.
    """
    try:
        response = httpx.get(f"{BLOCKCHAIN_RPC}/rpc/getBalance/{address}", timeout=5.0)
        if response.status_code == 200:
            return response.json()
    except Exception as e:
        return {"error": str(e)}
    return {"error": f"HTTP {response.status_code}"}
|
||||
|
||||
def list_wallets():
    """List local wallet files found under WALLET_DIR.

    Each entry carries id (file stem), address, public_key and creation
    timestamp. Unreadable/corrupt wallet files are skipped (best-effort).
    Returns a list of dicts.
    """
    WALLET_DIR.mkdir(parents=True, exist_ok=True)

    wallets = []
    for wallet_file in WALLET_DIR.glob("*.json"):
        try:
            with open(wallet_file, 'r') as f:
                data = json.load(f)
        except Exception:
            # Idiom fix: the original bound the exception to an unused name.
            # Deliberately best-effort — a bad file must not break listing.
            continue
        wallets.append({
            "id": wallet_file.stem,
            "address": data.get("address", "unknown"),
            "public_key": data.get("public_key", "unknown"),
            "created": data.get("created_at", "unknown")
        })
    return wallets
|
||||
|
||||
def create_wallet(wallet_id, address=None):
    """Create a demo wallet JSON file under WALLET_DIR.

    Returns ``(True, message)`` on success and ``(False, reason)`` when the
    wallet already exists or writing fails. Address/public key are mock
    values — this is not a real key generator.
    """
    WALLET_DIR.mkdir(parents=True, exist_ok=True)

    wallet_file = WALLET_DIR / f"{wallet_id}.json"
    if wallet_file.exists():
        return False, "Wallet already exists"

    # Generate a mock address if not provided
    if not address:
        address = f"aitbc1{wallet_id}{'x' * (40 - len(wallet_id))}"

    # Generate a mock public key
    public_key = f"0x{'1234567890abcdef' * 4}"

    # Fix: the original stamped *local* time and appended "Z" (which means
    # UTC). Record genuine UTC so the suffix is truthful.
    from datetime import timezone
    created_at = datetime.now(timezone.utc).replace(tzinfo=None).isoformat() + "Z"

    wallet_data = {
        "wallet_id": wallet_id,
        "address": address,
        "public_key": public_key,
        "created_at": created_at,
        "note": "This is a demo wallet file - not for production use"
    }

    try:
        with open(wallet_file, 'w') as f:
            json.dump(wallet_data, f, indent=2)
        return True, f"Wallet created: {wallet_file}"
    except Exception as e:
        return False, str(e)
|
||||
|
||||
def get_block_info(height=None):
    """Fetch a block by *height*, or the chain head when height is None.

    Returns the parsed JSON on success, or ``{"error": ...}`` on any HTTP
    error status or exception.
    """
    try:
        if height:
            url = f"{BLOCKCHAIN_RPC}/rpc/blocks/{height}"
        else:
            url = f"{BLOCKCHAIN_RPC}/rpc/head"

        response = httpx.get(url, timeout=5.0)
        if response.status_code == 200:
            return response.json()
        return {"error": f"HTTP {response.status_code}"}
    except Exception as e:
        return {"error": str(e)}
|
||||
|
||||
def main():
    """Parse CLI arguments and dispatch to the requested wallet command.

    Commands: status, list, balance, block, create. With no command the
    banner and argparse help are printed.
    """
    parser = argparse.ArgumentParser(
        description="AITBC Blockchain Wallet CLI",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  %(prog)s status                 Check blockchain connection
  %(prog)s list                   List all local wallets
  %(prog)s balance <address>      Get balance of an address
  %(prog)s block                  Show latest block info
  %(prog)s block <height>         Show specific block info
  %(prog)s create <wallet_id>     Create a demo wallet file
        """
    )

    subparsers = parser.add_subparsers(dest="command", help="Available commands")

    # Status command (no arguments)
    subparsers.add_parser("status", help="Check blockchain connection status")

    # List command (no arguments)
    subparsers.add_parser("list", help="List all local wallets")

    # Balance command
    balance_parser = subparsers.add_parser("balance", help="Get balance for an address")
    balance_parser.add_argument("address", help="Wallet address to check")

    # Block command ('height' is optional: omitted means the chain head)
    block_parser = subparsers.add_parser("block", help="Get block information")
    block_parser.add_argument("height", nargs="?", type=int, help="Block height (optional)")

    # Create command
    create_parser = subparsers.add_parser("create", help="Create a new wallet file")
    create_parser.add_argument("wallet_id", help="Wallet identifier")
    create_parser.add_argument("--address", help="Wallet address")

    args = parser.parse_args()

    if not args.command:
        print_header()
        parser.print_help()
        return

    if args.command == "status":
        print_header()
        print("Checking blockchain connection...\n")

        # check_blockchain_connection() (defined earlier in this file) returns
        # (connected, height_or_error, block_hash_or_sentinel); the sentinel
        # "node_running" marks a reachable node whose RPC endpoints failed.
        connected, info, block_hash = check_blockchain_connection()
        if connected:
            print(f"✅ Status: CONNECTED")
            print(f"📦 Node: {BLOCKCHAIN_RPC}")
            print(f"🔗 Latest Block: #{info}")
            print(f"🧮 Block Hash: {block_hash}")
            print(f"⏰ Checked at: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
        elif block_hash == "node_running":
            print(f"⚠️ Status: NODE RUNNING - RPC UNAVAILABLE")
            print(f"📦 Node: {BLOCKCHAIN_RPC}")
            print(f"❌ RPC Error: {info}")
            print(f"💡 The blockchain node is running but RPC endpoints are not working")
            print(f"   This might be due to initialization or database issues")
            print(f"⏰ Checked at: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
        else:
            print(f"❌ Status: DISCONNECTED")
            print(f"📦 Node: {BLOCKCHAIN_RPC}")
            print(f"⚠️ Error: {info}")
            print(f"💡 Make sure the blockchain node is running on port 9080")

    elif args.command == "list":
        print_header()
        wallets = list_wallets()

        if wallets:
            print(f"Found {len(wallets)} wallet(s) in {WALLET_DIR}:\n")
            for w in wallets:
                print(f"🔐 Wallet ID: {w['id']}")
                print(f"   Address: {w['address']}")
                print(f"   Public Key: {w['public_key'][:20]}...")
                print(f"   Created: {w['created']}")
                print()
        else:
            print(f"No wallets found in {WALLET_DIR}")
            print("\n💡 To create a wallet, use the wallet-daemon service")

    elif args.command == "balance":
        print_header()
        print(f"Checking balance for address: {args.address}\n")

        result = get_balance(args.address)
        if "error" in result:
            print(f"❌ Error: {result['error']}")
        else:
            balance = result.get("balance", 0)
            print(f"💰 Balance: {balance} AITBC")
            print(f"📍 Address: {args.address}")

    elif args.command == "block":
        print_header()
        # Fix: compare against None so block height 0 (genesis) is not
        # mistaken for "no height given".
        if args.height is not None:
            print(f"Getting block #{args.height}...\n")
        else:
            print("Getting latest block...\n")

        result = get_block_info(args.height)
        if "error" in result:
            print(f"❌ Error: {result['error']}")
        else:
            print(f"📦 Block Height: {result.get('height', 'unknown')}")
            print(f"🧮 Block Hash: {result.get('hash', 'unknown')}")
            print(f"⏰ Timestamp: {result.get('timestamp', 'unknown')}")
            print(f"👤 Proposer: {result.get('proposer', 'unknown')}")
            print(f"📊 Transactions: {len(result.get('transactions', []))}")

    elif args.command == "create":
        print_header()
        success, message = create_wallet(args.wallet_id, args.address)
        if success:
            print(f"✅ {message}")
            print(f"\nWallet Details:")
            print(f"   ID: {args.wallet_id}")
            print(f"   Address: {args.address or f'aitbc1{args.wallet_id}...'}")
            print(f"\n💡 This is a demo wallet file for testing purposes")
            print(f"   Use 'aitbc-wallet list' to see all wallets")
        else:
            print(f"❌ Error: {message}")

    else:
        parser.print_help()


if __name__ == "__main__":
    main()
|
||||
@@ -1,102 +0,0 @@
|
||||
.TH AITBC-WALLET "1" "December 2025" "AITBC Wallet CLI" "User Commands"
|
||||
.SH NAME
|
||||
aitbc-wallet \- AITBC Blockchain Wallet Command Line Interface
|
||||
.SH SYNOPSIS
|
||||
.B aitbc-wallet
|
||||
[\fIOPTIONS\fR] \fICOMMAND\fR [\fIARGS\fR]
|
||||
.SH DESCRIPTION
|
||||
The AITBC Wallet CLI is a command-line tool for interacting with the AITBC blockchain. It allows you to manage wallets, check balances, and monitor blockchain status without exposing your wallet to web interfaces.
|
||||
.SH COMMANDS
|
||||
.TP
|
||||
\fBstatus\fR
|
||||
Check if the wallet is connected to the AITBC blockchain node.
|
||||
.TP
|
||||
\fBlist\fR
|
||||
List all local wallets stored in ~/.aitbc/wallets/.
|
||||
.TP
|
||||
\fBbalance\fR \fIADDRESS\fR
|
||||
Get the AITBC token balance for the specified address.
|
||||
.TP
\fBblock\fR [\fIHEIGHT\fR]
Show information about the latest block or a specific block height.
.TP
\fBcreate\fR \fIWALLET_ID\fR [\fB\-\-address\fR \fIADDRESS\fR]
Create a new local wallet file (demo use only).
|
||||
.SH EXAMPLES
|
||||
Check blockchain connection status:
|
||||
.P
|
||||
.RS 4
|
||||
.nf
|
||||
$ aitbc-wallet status
|
||||
==================================================
|
||||
AITBC Blockchain Wallet CLI
|
||||
==================================================
|
||||
Checking blockchain connection...
|
||||
|
||||
✅ Status: CONNECTED
|
||||
📦 Node: http://127.0.0.1:9080
|
||||
🔗 Latest Block: #42
|
||||
🧮 Block Hash: 0x1234...abcd
|
||||
⏰ Checked at: 2025-12-28 10:30:00
|
||||
.fi
|
||||
.RE
|
||||
.P
|
||||
List all wallets:
|
||||
.P
|
||||
.RS 4
|
||||
.nf
|
||||
$ aitbc-wallet list
|
||||
==================================================
|
||||
AITBC Blockchain Wallet CLI
|
||||
==================================================
|
||||
Found 1 wallet(s) in /home/user/.aitbc/wallets:
|
||||
|
||||
🔐 Wallet ID: demo-wallet
|
||||
Address: aitbc1x7f8x9k2m3n4p5q6r7s8t9u0v1w2x3y4z5a6b7c
|
||||
Public Key: 0x3aaa0a91f69d886a90...
|
||||
Created: 2025-12-28T10:30:00Z
|
||||
.fi
|
||||
.RE
|
||||
.P
|
||||
Check wallet balance:
|
||||
.P
|
||||
.RS 4
|
||||
.nf
|
||||
$ aitbc-wallet balance aitbc1x7f8x9k2m3n4p5q6r7s8t9u0v1w2x3y4z5a6b7c
|
||||
==================================================
|
||||
AITBC Blockchain Wallet CLI
|
||||
==================================================
|
||||
Checking balance for address: aitbc1x7f8x9k2m3n4p5q6r7s8t9u0v1w2x3y4z5a6b7c
|
||||
|
||||
💰 Balance: 1000 AITBC
|
||||
📍 Address: aitbc1x7f8x9k2m3n4p5q6r7s8t9u0v1w2x3y4z5a6b7c
|
||||
.fi
|
||||
.RE
|
||||
.SH FILES
|
||||
.TP
|
||||
.I ~/.aitbc/wallets/
|
||||
Directory where local wallet files are stored.
|
||||
.TP
|
||||
.I /usr/local/bin/aitbc-wallet
|
||||
The wallet CLI executable.
|
||||
.SH ENVIRONMENT
|
||||
.TP
|
||||
.I BLOCKCHAIN_RPC
|
||||
The blockchain node RPC URL (default: http://127.0.0.1:9080).
|
||||
.SH SECURITY
|
||||
.P
|
||||
The wallet CLI is designed with security in mind:
|
||||
.RS 4
|
||||
.IP \(bu 4
|
||||
No web interface - purely command-line based
|
||||
.IP \(bu 4
|
||||
Wallets stored locally in encrypted format
|
||||
.IP \(bu 4
|
||||
Only connects to localhost blockchain node by default
|
||||
.IP \(bu 4
|
||||
No exposure of private keys to network services
|
||||
.RE
|
||||
.SH BUGS
|
||||
Report bugs to the AITBC project issue tracker.
|
||||
.SH SEE ALSO
|
||||
.BR aitbc-blockchain (8),
|
||||
.BR aitbc-coordinator (8)
|
||||
.SH AUTHOR
|
||||
AITBC Development Team
|
||||
@@ -1,256 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
AITBC Wallet CLI - Command Line Interface for AITBC Blockchain Wallet
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
import json
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
import httpx
|
||||
|
||||
# Add parent directory to path for imports
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.parent / "wallet-daemon" / "src"))
|
||||
|
||||
from app.keystore.service import KeystoreService
|
||||
from app.ledger_mock import SQLiteLedgerAdapter
|
||||
from app.settings import Settings
|
||||
|
||||
|
||||
class AITBCWallet:
    """AITBC Blockchain Wallet CLI.

    Wraps an on-disk keystore (via the project's ``KeystoreService``) plus the
    local blockchain node's HTTP RPC. Wallet files are stored under
    ``wallet_dir`` as ``<wallet_id>.wallet``. All public methods return a dict;
    failures are reported as ``{"error": ...}`` rather than raised.
    """

    def __init__(self, wallet_dir: Optional[str] = None):
        # Defaults to ~/.aitbc/wallets; the directory is created eagerly.
        self.wallet_dir = Path(wallet_dir or os.path.expanduser("~/.aitbc/wallets"))
        self.wallet_dir.mkdir(parents=True, exist_ok=True)
        # Keystore handles key generation, encryption and signing.
        # NOTE(review): KeystoreService's exact contract is defined in
        # wallet-daemon; the assumptions below should be confirmed against it.
        self.keystore = KeystoreService()
        self.blockchain_rpc = "http://127.0.0.1:9080"  # Default blockchain node RPC

    def _get_wallet_path(self, wallet_id: str) -> Path:
        """Return the on-disk path for the wallet file of *wallet_id*."""
        return self.wallet_dir / f"{wallet_id}.wallet"

    def create_wallet(self, wallet_id: str, password: str) -> dict:
        """Create a new wallet.

        Returns ``{"wallet_id", "public_key", "status"}`` on success, or
        ``{"error": "Wallet already exists"}`` when the file is present.
        """
        wallet_path = self._get_wallet_path(wallet_id)

        if wallet_path.exists():
            return {"error": "Wallet already exists"}

        # Generate keypair
        keypair = self.keystore.generate_keypair()

        # Store encrypted wallet — only the public key, the encrypted private
        # key and its salt are placed in the wallet payload.
        wallet_data = {
            "wallet_id": wallet_id,
            "public_key": keypair["public_key"],
            "encrypted_private_key": keypair["encrypted_private_key"],
            "salt": keypair["salt"]
        }

        # Encrypt and save
        self.keystore.save_wallet(wallet_path, wallet_data, password)

        return {
            "wallet_id": wallet_id,
            "public_key": keypair["public_key"],
            "status": "created"
        }

    def list_wallets(self) -> list:
        """List all wallet addresses.

        Returns one dict per ``*.wallet`` file with ``wallet_id``, a
        placeholder ``address`` derived from the file name, and ``path``.
        Files that cannot be opened are skipped.
        """
        wallets = []
        for wallet_file in self.wallet_dir.glob("*.wallet"):
            try:
                wallet_id = wallet_file.stem
                # Try to read public key without decrypting
                with open(wallet_file, 'rb') as f:
                    # This is simplified - in real implementation, we'd read metadata
                    # NOTE(review): `f` is opened but never read; the open() only
                    # serves as an existence/readability check here.
                    wallets.append({
                        "wallet_id": wallet_id,
                        "address": f"0x{wallet_id[:8]}...",  # Simplified address format
                        "path": str(wallet_file)
                    })
            except Exception:
                continue
        return wallets

    def get_balance(self, wallet_id: str, password: str) -> dict:
        """Get wallet balance from blockchain.

        Unlocks the wallet to obtain its public key, then queries
        ``/v1/balances/<public_key>`` on the node. Returns the node's JSON
        on success or ``{"error": ...}`` on any failure (missing wallet,
        wrong password, unreachable node, non-200 response).
        """
        # First unlock wallet to get public key
        wallet_path = self._get_wallet_path(wallet_id)

        if not wallet_path.exists():
            return {"error": "Wallet not found"}

        try:
            wallet_data = self.keystore.load_wallet(wallet_path, password)
            public_key = wallet_data["public_key"]

            # Query blockchain for balance
            try:
                with httpx.Client() as client:
                    response = client.get(
                        f"{self.blockchain_rpc}/v1/balances/{public_key}",
                        timeout=5.0
                    )
                    if response.status_code == 200:
                        return response.json()
                    else:
                        return {"error": "Failed to query blockchain", "status": response.status_code}
            except Exception as e:
                return {"error": f"Cannot connect to blockchain: {str(e)}"}

        except Exception as e:
            return {"error": f"Failed to unlock wallet: {str(e)}"}

    def check_connection(self) -> dict:
        """Check if connected to blockchain.

        Probes ``/v1/blocks/head`` on the node and returns a status dict:
        ``connected`` (bool), ``status``, and either ``latest_block`` plus
        ``blockchain_url`` (on success) or ``error`` (on failure).
        """
        try:
            with httpx.Client() as client:
                # Try to get the latest block
                response = client.get(f"{self.blockchain_rpc}/v1/blocks/head", timeout=5.0)
                if response.status_code == 200:
                    block = response.json()
                    return {
                        "connected": True,
                        "blockchain_url": self.blockchain_rpc,
                        "latest_block": block.get("height", "unknown"),
                        "status": "connected"
                    }
                else:
                    return {
                        "connected": False,
                        "error": f"HTTP {response.status_code}",
                        "status": "disconnected"
                    }
        except Exception as e:
            return {
                "connected": False,
                "error": str(e),
                "status": "disconnected"
            }

    def send_transaction(self, wallet_id: str, password: str, to_address: str, amount: float) -> dict:
        """Send transaction.

        Unlocks the wallet, builds and signs a transfer of *amount* to
        *to_address*, and POSTs it to ``/v1/transactions``. Returns the
        node's JSON on success or ``{"error": ...}`` on any failure.
        """
        wallet_path = self._get_wallet_path(wallet_id)

        if not wallet_path.exists():
            return {"error": "Wallet not found"}

        try:
            # Unlock wallet
            wallet_data = self.keystore.load_wallet(wallet_path, password)
            # NOTE(review): create_wallet() persists only "encrypted_private_key";
            # this lookup assumes KeystoreService.load_wallet() decrypts it back
            # into a "private_key" entry — confirm against the keystore contract,
            # otherwise this raises KeyError (caught below as a generic error).
            private_key = wallet_data["private_key"]

            # Create transaction
            transaction = {
                "from": wallet_data["public_key"],
                "to": to_address,
                "amount": amount,
                "nonce": 0  # Would get from blockchain
            }

            # Sign transaction
            signature = self.keystore.sign_transaction(private_key, transaction)
            transaction["signature"] = signature

            # Send to blockchain
            with httpx.Client() as client:
                response = client.post(
                    f"{self.blockchain_rpc}/v1/transactions",
                    json=transaction,
                    timeout=5.0
                )
                if response.status_code == 200:
                    return response.json()
                else:
                    return {"error": f"Failed to send transaction: {response.text}"}

        except Exception as e:
            return {"error": str(e)}
|
||||
|
||||
|
||||
def _report_error(result) -> bool:
    """Print ``result['error']`` to stderr if present; True when reported."""
    if "error" in result:
        print(f"Error: {result['error']}", file=sys.stderr)
        return True
    return False


def _build_parser() -> argparse.ArgumentParser:
    """Construct the CLI argument parser (one subcommand per operation)."""
    parser = argparse.ArgumentParser(description="AITBC Blockchain Wallet CLI")
    parser.add_argument("--wallet-dir", default=None, help="Wallet directory path")

    sub = parser.add_subparsers(dest="command", help="Available commands")

    create_cmd = sub.add_parser("create", help="Create a new wallet")
    create_cmd.add_argument("wallet_id", help="Wallet identifier")
    create_cmd.add_argument("password", help="Wallet password")

    sub.add_parser("list", help="List all wallets")

    balance_cmd = sub.add_parser("balance", help="Get wallet balance")
    balance_cmd.add_argument("wallet_id", help="Wallet identifier")
    balance_cmd.add_argument("password", help="Wallet password")

    sub.add_parser("status", help="Check blockchain connection status")

    send_cmd = sub.add_parser("send", help="Send transaction")
    send_cmd.add_argument("wallet_id", help="Wallet identifier")
    send_cmd.add_argument("password", help="Wallet password")
    send_cmd.add_argument("to_address", help="Recipient address")
    send_cmd.add_argument("amount", type=float, help="Amount to send")

    return parser


def main():
    """Main CLI entry point: parse arguments, run the requested command."""
    parser = _build_parser()
    args = parser.parse_args()

    if not args.command:
        parser.print_help()
        return

    wallet = AITBCWallet(args.wallet_dir)

    if args.command == "create":
        result = wallet.create_wallet(args.wallet_id, args.password)
        if not _report_error(result):
            print(f"Wallet created successfully!")
            print(f"Wallet ID: {result['wallet_id']}")
            print(f"Public Key: {result['public_key']}")

    elif args.command == "list":
        entries = wallet.list_wallets()
        if not entries:
            print("No wallets found")
        else:
            print("Available wallets:")
            for entry in entries:
                print(f"  - {entry['wallet_id']}: {entry['address']}")

    elif args.command == "balance":
        result = wallet.get_balance(args.wallet_id, args.password)
        if not _report_error(result):
            print(f"Balance: {result.get('balance', 'unknown')}")

    elif args.command == "status":
        result = wallet.check_connection()
        if result["connected"]:
            print(f"✓ Connected to blockchain at {result['blockchain_url']}")
            print(f"  Latest block: {result['latest_block']}")
        else:
            print(f"✗ Not connected: {result['error']}")

    elif args.command == "send":
        result = wallet.send_transaction(args.wallet_id, args.password, args.to_address, args.amount)
        if not _report_error(result):
            print(f"Transaction sent: {result.get('tx_hash', 'unknown')}")


if __name__ == "__main__":
    main()
|
||||
@@ -1,101 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Simple AITBC Wallet CLI
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import sys
|
||||
import os
|
||||
from pathlib import Path
|
||||
import httpx
|
||||
import getpass
|
||||
|
||||
def check_blockchain_connection():
    """Probe the local node's head endpoint.

    Returns:
        tuple: ``(True, height)`` when the node answers 200, otherwise
        ``(False, reason)`` where reason is an HTTP status or error text.
    """
    try:
        resp = httpx.get("http://127.0.0.1:9080/rpc/head", timeout=5.0)
        if resp.status_code != 200:
            return False, f"HTTP {resp.status_code}"
        head = resp.json()
        return True, head.get("height", "unknown")
    except Exception as exc:
        return False, str(exc)
|
||||
|
||||
def get_balance(address):
    """Ask the local node for the balance of *address*.

    Returns:
        dict: The node's JSON response, or ``{"error": ...}`` on a non-200
        status or connection failure.
    """
    url = f"http://127.0.0.1:9080/rpc/getBalance/{address}"
    try:
        resp = httpx.get(url, timeout=5.0)
        if resp.status_code == 200:
            return resp.json()
        return {"error": f"HTTP {resp.status_code}"}
    except Exception as exc:
        return {"error": str(exc)}
|
||||
|
||||
def list_wallets(wallet_dir=None):
    """List local wallets.

    Args:
        wallet_dir: Directory to scan; defaults to ``~/.aitbc/wallets``.
            Created if it does not exist.

    Returns:
        list[dict]: One entry per readable ``*.json`` wallet file with
        ``id``, ``address`` and a 20-character ``public_key`` preview.
        Unreadable or malformed files are skipped.
    """
    target = Path(wallet_dir) if wallet_dir is not None else Path.home() / ".aitbc" / "wallets"
    target.mkdir(parents=True, exist_ok=True)

    wallets = []
    for wallet_file in target.glob("*.json"):
        try:
            with open(wallet_file, 'r') as f:
                data = json.load(f)
            wallets.append({
                "id": wallet_file.stem,
                "address": data.get("address", "unknown"),
                "public_key": data.get("public_key", "unknown")[:20] + "..."
            })
        except Exception:
            # Fix: was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; Exception still skips any bad wallet file.
            continue
    return wallets
|
||||
|
||||
def main():
    """Entry point for the simple wallet CLI: parse args and dispatch."""
    parser = argparse.ArgumentParser(description="AITBC Wallet CLI")
    commands = parser.add_subparsers(dest="command", help="Commands")

    # Subcommands: status and list take no arguments; balance takes an address.
    commands.add_parser("status", help="Check blockchain connection")
    commands.add_parser("list", help="List wallets")
    balance_cmd = commands.add_parser("balance", help="Get balance")
    balance_cmd.add_argument("address", help="Wallet address")

    args = parser.parse_args()

    if args.command == "status":
        ok, detail = check_blockchain_connection()
        if not ok:
            print(f"✗ Not connected: {detail}")
        else:
            print(f"✓ Connected to AITBC Blockchain")
            print(f"  Latest block: {detail}")
            print(f"  Node: http://127.0.0.1:9080")

    elif args.command == "list":
        found = list_wallets()
        if not found:
            print("No wallets found")
            print(f"Wallet directory: {Path.home() / '.aitbc' / 'wallets'}")
        else:
            print("Local wallets:")
            for entry in found:
                print(f"  {entry['id']}: {entry['address']}")

    elif args.command == "balance":
        result = get_balance(args.address)
        if "error" in result:
            print(f"Error: {result['error']}")
        else:
            print(f"Balance: {result.get('balance', 0)} AITBC")

    else:
        # No (or unknown) command: show usage.
        parser.print_help()


if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user