docs: run automated documentation updates workflow

This commit is contained in:
oib
2026-03-03 20:48:51 +01:00
parent 0ebac91f45
commit f0c7cd321e
60 changed files with 678 additions and 81 deletions

View File

@@ -0,0 +1,152 @@
#!/usr/bin/env python3
"""
Simple working coordinator API for GPU miner
"""
import logging
from fastapi import FastAPI, HTTPException, Header
from fastapi.middleware.cors import CORSMiddleware
from typing import Optional, Dict, Any
from pydantic import BaseModel
import time
# Setup logging once at import time; INFO keeps per-request messages
# visible without debug noise.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Create FastAPI app — the ASGI application the GPU miner talks to.
app = FastAPI(
    title="AITBC Coordinator API - Working",
    version="0.1.0",
    description="Simple working coordinator service for GPU miner",
)
# Add CORS middleware. Wildcard origins are a dev/test setting;
# NOTE(review): tighten allow_origins before any production deployment.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"],
    allow_headers=["*"]
)

# Simple in-memory storage — all state is lost when the process restarts.
miners: Dict[str, Dict[str, Any]] = {}  # miner_id -> registration/heartbeat record
jobs: Dict[str, Dict[str, Any]] = {}  # job_id -> job payload (currently unused)
# Pydantic models.
# NOTE(review): the endpoints below accept a plain ``dict`` body instead of
# these models, so they currently document the expected shapes only —
# FastAPI does not validate requests against them.
class MinerRegister(BaseModel):
    """Registration payload a miner sends to POST /v1/miners/register."""
    miner_id: str
    capabilities: list[str] = []  # e.g. ["gpu"]
    region: str = "default"
    concurrency: int = 1  # max jobs the miner will run at once
class MinerHeartbeat(BaseModel):
    """Periodic liveness report for POST /v1/miners/heartbeat."""
    miner_id: str
    status: str = "online"  # stored verbatim in the miner record
    inflight: int = 0  # jobs currently being processed by the miner
class JobSubmit(BaseModel):
    """Job submission payload (no submit endpoint is wired up yet)."""
    prompt: str
    model: str = "gemma3:1b"  # default model identifier
    priority: str = "normal"
# Basic auth (simple for testing)
API_KEY = "miner_test"

def verify_api_key(api_key: Optional[str] = Header(None), x_api_key: Optional[str] = Header(None)):
    """Check the API key supplied in either the api-key or x-api-key header.

    Returns the accepted key, or raises 401 when it does not match.
    """
    supplied = api_key or x_api_key
    if supplied == API_KEY:
        return supplied
    raise HTTPException(status_code=401, detail="invalid api key")
@app.get("/health", tags=["health"], summary="Service healthcheck")
async def health() -> dict[str, str]:
    """Liveness probe: always reports the service as healthy."""
    payload = {"status": "ok", "service": "coordinator-api"}
    return payload
@app.get("/v1/health", tags=["health"], summary="Service healthcheck")
async def health_v1() -> dict[str, str]:
    """Versioned alias of /health with the identical response."""
    payload = {"status": "ok", "service": "coordinator-api"}
    return payload
@app.post("/v1/miners/register", tags=["miner"], summary="Register or update miner")
async def register_miner(
    request: dict,
    api_key: Optional[str] = Header(None),
    x_api_key: Optional[str] = Header(None),
    miner_id: Optional[str] = None
) -> dict[str, str]:
    """Register (or re-register) a miner and return a fresh session token.

    The API key may arrive in either the api-key or x-api-key header; the
    miner id is taken from the query string first, then the body, falling
    back to "miner_test". Raises 401 on a bad key.
    """
    key = api_key or x_api_key
    if key != API_KEY:
        raise HTTPException(status_code=401, detail="invalid api key")
    # Get miner_id from query parameter or request body
    mid = miner_id or request.get("miner_id", "miner_test")
    # Register the miner. Fix: honor the capabilities declared in the
    # request (MinerRegister has a `capabilities` field) instead of
    # hard-coding ["gpu"]; ["gpu"] stays the default for older miners.
    miners[mid] = {
        "id": mid,
        "capabilities": request.get("capabilities", ["gpu"]),
        "region": request.get("region", "localhost"),
        "concurrency": request.get("concurrency", 1),
        "status": "online",
        "inflight": 0,
        "last_heartbeat": time.time(),
        # Opaque token; the timestamp suffix makes it unique per registration.
        "session_token": f"token_{mid}_{int(time.time())}"
    }
    logger.info(f"Miner {mid} registered")
    return {"status": "ok", "session_token": miners[mid]["session_token"]}
@app.post("/v1/miners/heartbeat", tags=["miner"], summary="Send miner heartbeat")
async def miner_heartbeat(
    request: dict,
    api_key: Optional[str] = Header(None),
    x_api_key: Optional[str] = Header(None),
    miner_id: Optional[str] = None
) -> dict[str, str]:
    """Record a miner heartbeat: status, in-flight count, and timestamp.

    Raises 401 on a bad API key and 404 when the miner never registered.
    """
    key = api_key or x_api_key
    if key != API_KEY:
        raise HTTPException(status_code=401, detail="invalid api key")
    # Get miner_id from query parameter or request body
    mid = miner_id or request.get("miner_id", "miner_test")
    if mid not in miners:
        raise HTTPException(status_code=404, detail="miner not registered")
    miners[mid].update({
        "status": request.get("status", "online"),
        # Fix: the MinerHeartbeat model declares the field as "inflight",
        # but only "current_jobs" was read. Accept both (inflight wins),
        # keeping backward compatibility with miners sending current_jobs.
        "inflight": request.get("inflight", request.get("current_jobs", 0)),
        "last_heartbeat": time.time()
    })
    return {"status": "ok"}
@app.post("/v1/miners/poll", tags=["miner"], summary="Poll for next job")
async def poll_for_job(
    request: dict,
    api_key: Optional[str] = Header(None),
    x_api_key: Optional[str] = Header(None),
    miner_id: Optional[str] = None
) -> Dict[str, Any]:
    """Return the next job for the calling miner.

    Job dispatch is not implemented yet, so every authenticated poll
    comes back empty; a bad key raises 401.
    """
    supplied = api_key or x_api_key
    if supplied == API_KEY:
        return {"status": "no_jobs"}
    raise HTTPException(status_code=401, detail="invalid api key")
@app.get("/", tags=["root"], summary="Root endpoint")
async def root() -> dict[str, str]:
    """Identify the service for anyone hitting the bare URL."""
    banner = {"service": "AITBC Coordinator API", "status": "running"}
    return banner
if __name__ == "__main__":
    # Standalone dev server. uvicorn is imported lazily so the module can
    # be loaded by an external ASGI server without uvicorn installed.
    import uvicorn
    logger.info("Starting working coordinator API on port 9080")
    uvicorn.run(app, host="127.0.0.1", port=9080)

38
dev/scripts/deploy_hotfix.sh Executable file
View File

@@ -0,0 +1,38 @@
#!/bin/bash
# Hotfix deployment: unpack update.tar.gz, copy sources into /opt,
# rebuild the blockchain DB from scratch, and restart the services.
set -e
# Extract the update
cd /home/oib/aitbc
tar -xzf update.tar.gz
# Deploy to blockchain-node
echo "Deploying to blockchain-node..."
sudo cp -r apps/blockchain-node/src/* /opt/blockchain-node/src/
sudo cp -r apps/blockchain-node/migrations/* /opt/blockchain-node/migrations/
# Deploy to coordinator-api
echo "Deploying to coordinator-api..."
sudo cp -r apps/coordinator-api/src/* /opt/coordinator-api/src/
# Stop services ("|| true" keeps set -e from aborting when a unit is absent)
sudo systemctl stop aitbc-blockchain-node-1 aitbc-blockchain-rpc-1 aitbc-coordinator-api || true
sudo systemctl stop aitbc-blockchain-node aitbc-blockchain-rpc || true
# Run DB Migrations
echo "Running DB migrations..."
cd /opt/blockchain-node
# Drop the old database to be safe since it might have schema issues we fixed
# NOTE(review): destructive — wipes all chain state; acceptable on devnet only.
sudo rm -f data/chain.db* data/blockchain.db* || true
sudo -u root PYTHONPATH=src:scripts .venv/bin/python -m alembic upgrade head
# Run Genesis
echo "Creating Genesis..."
cd /opt/blockchain-node
sudo -u root PYTHONPATH=src:scripts .venv/bin/python /home/oib/aitbc/dev/scripts/create_genesis_all.py
# Start services
echo "Restarting services..."
sudo systemctl restart aitbc-blockchain-node-1 aitbc-blockchain-rpc-1 aitbc-coordinator-api || true
sudo systemctl restart aitbc-blockchain-node aitbc-blockchain-rpc || true
echo "Done!"

View File

@@ -0,0 +1,57 @@
import os
import re
def replace_in_file(filepath, replacements):
    """Apply ordered (old, new) substring replacements to a text file.

    The file is rewritten only when at least one replacement actually
    changed the content, so untouched files keep their mtime.
    """
    # Fix: read/write with an explicit encoding so the script does not
    # depend on the platform's default locale encoding.
    with open(filepath, 'r', encoding='utf-8') as f:
        content = f.read()
    modified = content
    for old, new in replacements:
        modified = modified.replace(old, new)
    if modified != content:
        with open(filepath, 'w', encoding='utf-8') as f:
            f.write(modified)
        print(f"Fixed links in {filepath}")
# Fix docs/README.md
replace_in_file('docs/README.md', [
    ('../3_miners/1_quick-start.md', '3_miners/1_quick-start.md'),
    ('../2_clients/1_quick-start.md', '2_clients/1_quick-start.md'),
    ('../8_development/', '8_development/'),
    ('../11_agents/', '11_agents/'),
    # NOTE(review): old == new, so this entry is a no-op; the intended
    # target (docs/5_reference/ or similar) still needs to be decided.
    ('../cli/README.md', '../cli/README.md')
])
# Fix docs/0_getting_started/3_cli.md
replace_in_file('docs/0_getting_started/3_cli.md', [
    ('../11_agents/swarm/', '../11_agents/swarm.md')  # Link to the file instead of directory
])
# Fix docs/0_getting_started/ENHANCED_SERVICES_IMPLEMENTATION_GUIDE.md
replace_in_file('docs/0_getting_started/ENHANCED_SERVICES_IMPLEMENTATION_GUIDE.md', [
    # NOTE(review): a bare 'docs/' prefix match is very broad and may
    # rewrite unrelated text, not just links — confirm against the file.
    ('docs/', '../')
])
# Fix docs/18_explorer/EXPLORER_FINAL_STATUS.md
replace_in_file('docs/18_explorer/EXPLORER_FINAL_STATUS.md', [
    ('../apps/blockchain-explorer/README.md', '../../apps/blockchain-explorer/README.md')
])
# Fix docs/20_phase_reports/COMPREHENSIVE_GUIDE.md
replace_in_file('docs/20_phase_reports/COMPREHENSIVE_GUIDE.md', [
    ('docs/11_agents/', '../11_agents/'),
    ('docs/2_clients/', '../2_clients/'),
    ('docs/6_architecture/', '../6_architecture/'),
    ('docs/10_plan/', '../10_plan/'),
    # NOTE(review): matches every occurrence of "LICENSE", not only links.
    ('LICENSE', '../../LICENSE')
])
# Fix docs/security/SECURITY_AGENT_WALLET_PROTECTION.md
replace_in_file('docs/security/SECURITY_AGENT_WALLET_PROTECTION.md', [
    ('../docs/SECURITY_ARCHITECTURE.md', 'SECURITY_ARCHITECTURE.md'),
    ('../docs/SMART_CONTRACT_SECURITY.md', 'SMART_CONTRACT_SECURITY.md'),
    ('../docs/AGENT_DEVELOPMENT.md', '../11_agents/AGENT_DEVELOPMENT.md')
])
print("Finished fixing broken links")

View File

@@ -0,0 +1,45 @@
import os
def replace_in_file(filepath, replacements):
    """Apply ordered (old, new) substring replacements to a text file.

    Best-effort by design: any error (missing file, permissions, bad
    bytes) is reported and swallowed so one bad file does not stop the
    whole link-fixing run.
    """
    try:
        # Fix: explicit encoding so behavior does not depend on the
        # platform's default locale encoding.
        with open(filepath, 'r', encoding='utf-8') as f:
            content = f.read()
        modified = content
        for old, new in replacements:
            modified = modified.replace(old, new)
        if modified != content:
            with open(filepath, 'w', encoding='utf-8') as f:
                f.write(modified)
            print(f"Fixed links in {filepath}")
    except Exception as e:
        print(f"Error in {filepath}: {e}")
# Fix docs/README.md
replace_in_file('docs/README.md', [
    ('../cli/README.md', '0_getting_started/3_cli.md')
])
# Fix docs/8_development/DEVELOPMENT_GUIDELINES.md
replace_in_file('docs/8_development/DEVELOPMENT_GUIDELINES.md', [
    ('../.windsurf/workflows/project-organization.md', '../../.windsurf/workflows/project-organization.md'),
    ('../.windsurf/workflows/file-organization-prevention.md', '../../.windsurf/workflows/file-organization-prevention.md')
])
# Fix docs/20_phase_reports/COMPREHENSIVE_GUIDE.md
replace_in_file('docs/20_phase_reports/COMPREHENSIVE_GUIDE.md', [
    ('../11_agents/marketplace/', '../11_agents/README.md'),
    ('../11_agents/swarm/', '../11_agents/README.md'),
    ('../11_agents/development/', '../11_agents/README.md'),
    ('../10_plan/multi-language-apis-completed.md', '../12_issues/multi-language-apis-completed.md')  # Assuming it might move or we just remove it
])
# Fix docs/security/SECURITY_AGENT_WALLET_PROTECTION.md
replace_in_file('docs/security/SECURITY_AGENT_WALLET_PROTECTION.md', [
    # NOTE(review): these are bare substring swaps, not path-anchored —
    # verify the replacement targets actually exist before running.
    ('SECURITY_ARCHITECTURE.md', 'SECURITY_OVERVIEW.md'),  # If it exists
    ('SMART_CONTRACT_SECURITY.md', 'README.md'),
    ('../11_agents/AGENT_DEVELOPMENT.md', '../11_agents/README.md')
])
print("Finished fixing broken links 2")

View File

@@ -0,0 +1,25 @@
--- a/apps/blockchain-node/src/aitbc_chain/database.py
+++ b/apps/blockchain-node/src/aitbc_chain/database.py
@@ -3,11 +3,22 @@
from contextlib import contextmanager
from sqlmodel import Session, SQLModel, create_engine
+from sqlalchemy import event
from .config import settings
_engine = create_engine(f"sqlite:///{settings.db_path}", echo=False)
+@event.listens_for(_engine, "connect")
+def set_sqlite_pragma(dbapi_connection, connection_record):
+ cursor = dbapi_connection.cursor()
+ cursor.execute("PRAGMA journal_mode=WAL")
+ cursor.execute("PRAGMA synchronous=NORMAL")
+ cursor.execute("PRAGMA cache_size=-64000")
+ cursor.execute("PRAGMA temp_store=MEMORY")
+ cursor.execute("PRAGMA mmap_size=30000000000")
+ cursor.execute("PRAGMA busy_timeout=5000")
+ cursor.close()
def init_db() -> None:
settings.db_path.parent.mkdir(parents=True, exist_ok=True)

View File

@@ -0,0 +1,20 @@
--- a/apps/blockchain-node/src/aitbc_chain/consensus/poa.py
+++ b/apps/blockchain-node/src/aitbc_chain/consensus/poa.py
@@ -171,7 +171,7 @@
)
# Broadcast the new block
- gossip_broker.publish(
+ await gossip_broker.publish(
"blocks",
{
"height": block.height,
@@ -207,7 +207,7 @@
session.commit()
# Broadcast genesis block for initial sync
- gossip_broker.publish(
+ await gossip_broker.publish(
"blocks",
{
"height": genesis.height,

View File

@@ -0,0 +1,11 @@
--- a/apps/blockchain-node/src/aitbc_chain/consensus/poa.py
+++ b/apps/blockchain-node/src/aitbc_chain/consensus/poa.py
@@ -194,7 +194,7 @@
except Exception as e:
logger.error(f"Failed to propose block: {e}")
- def _ensure_genesis_block(self) -> None:
+ async def _ensure_genesis_block(self) -> None:
"""Ensure genesis block exists"""
with self.session_factory() as session:
if session.exec(select(Block).where(Block.height == 0)).first():

View File

@@ -0,0 +1,11 @@
--- a/apps/blockchain-node/src/aitbc_chain/consensus/poa.py
+++ b/apps/blockchain-node/src/aitbc_chain/consensus/poa.py
@@ -101,7 +101,7 @@
# Wait for interval before proposing next block
await asyncio.sleep(self.config.interval_seconds)
- self._propose_block()
+ await self._propose_block()
except asyncio.CancelledError:
pass

View File

@@ -0,0 +1,11 @@
--- a/apps/blockchain-node/src/aitbc_chain/consensus/poa.py
+++ b/apps/blockchain-node/src/aitbc_chain/consensus/poa.py
@@ -81,7 +81,7 @@
if self._task is not None:
return
self._logger.info("Starting PoA proposer loop", extra={"interval": self._config.interval_seconds})
- self._ensure_genesis_block()
+ await self._ensure_genesis_block()
self._stop_event.clear()
self._task = asyncio.create_task(self._run_loop())

View File

@@ -0,0 +1,65 @@
# One-shot patch script: splices gossip subscriber setup into the
# blockchain node's main.py by exact-string replacement. Both injections
# are guarded by "marker not in content" checks, so re-running is a no-op.
# NOTE(review): every match string must be byte-identical to main.py,
# including indentation; leading whitespace appears to have been lost when
# this script was extracted — verify against the target file before running.
import re  # NOTE(review): imported but unused

with open("/home/oib/windsurf/aitbc/apps/blockchain-node/src/aitbc_chain/main.py", "r") as f:
    content = f.read()

# Add the gossip/sync imports right after the logger import (once).
if "from .gossip import gossip_broker" not in content:
    content = content.replace(
        "from .logger import get_logger",
        "from .logger import get_logger\nfrom .gossip import gossip_broker\nfrom .sync import ChainSync"
    )

# Inject _setup_gossip_subscribers() immediately before start() (once).
if "_setup_gossip_subscribers" not in content:
    content = content.replace(
        """ async def start(self) -> None:""",
        """ async def _setup_gossip_subscribers(self) -> None:
# Transactions
tx_sub = await gossip_broker.subscribe("transactions")
async def process_txs():
from .mempool import get_mempool
mempool = get_mempool()
while True:
try:
tx_data = await tx_sub.queue.get()
if isinstance(tx_data, str):
import json
tx_data = json.loads(tx_data)
chain_id = tx_data.get("chain_id", "ait-devnet")
mempool.add(tx_data, chain_id=chain_id)
except Exception as exc:
logger.error(f"Error processing transaction from gossip: {exc}")
asyncio.create_task(process_txs())
# Blocks
block_sub = await gossip_broker.subscribe("blocks")
async def process_blocks():
while True:
try:
block_data = await block_sub.queue.get()
if isinstance(block_data, str):
import json
block_data = json.loads(block_data)
chain_id = block_data.get("chain_id", "ait-devnet")
sync = ChainSync(session_factory=session_scope, chain_id=chain_id)
sync.import_block(block_data)
except Exception as exc:
logger.error(f"Error processing block from gossip: {exc}")
asyncio.create_task(process_blocks())
async def start(self) -> None:"""
    )

# Wire the setup call in right after the proposers start. Idempotent:
# after the first run the search text no longer matches.
# NOTE(review): original nesting (inside/outside the guard above) is
# ambiguous in the extracted source — confirm against the repo.
content = content.replace(
    """ self._start_proposers()
try:""",
    """ self._start_proposers()
await self._setup_gossip_subscribers()
try:"""
)

with open("/home/oib/windsurf/aitbc/apps/blockchain-node/src/aitbc_chain/main.py", "w") as f:
    f.write(content)

View File

@@ -0,0 +1,54 @@
# One-shot patch script: wires gossip backend initialization into node
# startup and gossip shutdown into node teardown in main.py, by
# exact-string replacement.
# NOTE(review): the triple-quoted match strings must be byte-identical to
# main.py (including indentation); leading whitespace appears to have been
# lost during extraction — verify against the target file before running.
import re  # NOTE(review): imported but unused

with open("/home/oib/windsurf/aitbc/apps/blockchain-node/src/aitbc_chain/main.py", "r") as f:
    content = f.read()

# Extend the gossip import to also bring in create_backend (once).
if "from .gossip import gossip_broker, create_backend" not in content:
    content = content.replace(
        "from .gossip import gossip_broker",
        "from .gossip import gossip_broker, create_backend"
    )

# Initialize the gossip backend before the DB/mempool come up.
# Idempotent: once rewritten, the original start() text no longer matches.
content = content.replace(
    """ async def start(self) -> None:
logger.info("Starting blockchain node", extra={"supported_chains": getattr(settings, 'supported_chains', settings.chain_id)})
init_db()
init_mempool(
backend=settings.mempool_backend,
db_path=str(settings.db_path.parent / "mempool.db"),
max_size=settings.mempool_max_size,
min_fee=settings.min_fee,
)""",
    """ async def start(self) -> None:
logger.info("Starting blockchain node", extra={"supported_chains": getattr(settings, 'supported_chains', settings.chain_id)})
# Initialize Gossip Backend
backend = create_backend(
settings.gossip_backend,
broadcast_url=settings.gossip_broadcast_url,
)
await gossip_broker.set_backend(backend)
init_db()
init_mempool(
backend=settings.mempool_backend,
db_path=str(settings.db_path.parent / "mempool.db"),
max_size=settings.mempool_max_size,
min_fee=settings.min_fee,
)"""
)

# Shut the gossip broker down after the proposers stop.
content = content.replace(
    """ async def _shutdown(self) -> None:
for chain_id, proposer in list(self._proposers.items()):
await proposer.stop()
self._proposers.clear()""",
    """ async def _shutdown(self) -> None:
for chain_id, proposer in list(self._proposers.items()):
await proposer.stop()
self._proposers.clear()
await gossip_broker.shutdown()"""
)

with open("/home/oib/windsurf/aitbc/apps/blockchain-node/src/aitbc_chain/main.py", "w") as f:
    f.write(content)

View File

@@ -0,0 +1,15 @@
# One-shot patch script: adds a log line where main.py receives a block
# from gossip. Idempotent — once the log line exists, the search text no
# longer matches and the replace is a no-op.
# NOTE(review): match strings must be byte-identical to main.py including
# indentation, which appears stripped in this extracted copy — verify first.
import re  # NOTE(review): imported but unused

with open("/home/oib/windsurf/aitbc/apps/blockchain-node/src/aitbc_chain/main.py", "r") as f:
    content = f.read()
content = content.replace(
    """ block_data = await block_sub.queue.get()
if isinstance(block_data, str):""",
    """ block_data = await block_sub.queue.get()
logger.info(f"Received block from gossip")
if isinstance(block_data, str):"""
)
with open("/home/oib/windsurf/aitbc/apps/blockchain-node/src/aitbc_chain/main.py", "w") as f:
    f.write(content)

View File

@@ -0,0 +1,30 @@
# One-shot patch script: expands the block-gossip handler in main.py with
# per-import logging (chain id, height, and the ChainSync import result).
# Idempotent — after the first run the search text no longer matches.
# NOTE(review): match strings must be byte-identical to main.py including
# indentation, which appears stripped in this extracted copy — verify first.
import re  # NOTE(review): imported but unused

with open("/home/oib/windsurf/aitbc/apps/blockchain-node/src/aitbc_chain/main.py", "r") as f:
    content = f.read()
content = content.replace(
    """ block_data = await block_sub.queue.get()
logger.info(f"Received block from gossip")
if isinstance(block_data, str):
import json
block_data = json.loads(block_data)
chain_id = block_data.get("chain_id", "ait-devnet")
sync = ChainSync(session_factory=session_scope, chain_id=chain_id)
sync.import_block(block_data)
except Exception as exc:""",
    """ block_data = await block_sub.queue.get()
logger.info(f"Received block from gossip")
if isinstance(block_data, str):
import json
block_data = json.loads(block_data)
chain_id = block_data.get("chain_id", "ait-devnet")
logger.info(f"Importing block for chain {chain_id}: {block_data.get('height')}")
sync = ChainSync(session_factory=session_scope, chain_id=chain_id)
res = sync.import_block(block_data)
logger.info(f"Import result: accepted={res.accepted}, reason={res.reason}")
except Exception as exc:"""
)
with open("/home/oib/windsurf/aitbc/apps/blockchain-node/src/aitbc_chain/main.py", "w") as f:
    f.write(content)

View File

@@ -0,0 +1,63 @@
# One-shot patch script: adds "chain_id" to the payloads poa.py publishes
# on the "blocks" gossip topic (both regular and genesis blocks), so the
# subscriber side can route imports to the right chain.
# Idempotent — after the first run the search text no longer matches.
# NOTE(review): match strings must be byte-identical to poa.py including
# indentation, which appears stripped in this extracted copy — verify first.
import re  # NOTE(review): imported but unused

with open("/home/oib/windsurf/aitbc/apps/blockchain-node/src/aitbc_chain/consensus/poa.py", "r") as f:
    content = f.read()

# Regular block broadcast: prepend chain_id to the payload dict.
content = content.replace(
    """ await gossip_broker.publish(
"blocks",
{
"height": block.height,
"hash": block.hash,
"parent_hash": block.parent_hash,
"proposer": block.proposer,
"timestamp": block.timestamp.isoformat(),
"tx_count": block.tx_count,
"state_root": block.state_root,
}
)""",
    """ await gossip_broker.publish(
"blocks",
{
"chain_id": self._config.chain_id,
"height": block.height,
"hash": block.hash,
"parent_hash": block.parent_hash,
"proposer": block.proposer,
"timestamp": block.timestamp.isoformat(),
"tx_count": block.tx_count,
"state_root": block.state_root,
}
)"""
)

# Genesis block broadcast: same chain_id addition.
content = content.replace(
    """ await gossip_broker.publish(
"blocks",
{
"height": genesis.height,
"hash": genesis.hash,
"parent_hash": genesis.parent_hash,
"proposer": genesis.proposer,
"timestamp": genesis.timestamp.isoformat(),
"tx_count": genesis.tx_count,
"state_root": genesis.state_root,
}
)""",
    """ await gossip_broker.publish(
"blocks",
{
"chain_id": self._config.chain_id,
"height": genesis.height,
"hash": genesis.hash,
"parent_hash": genesis.parent_hash,
"proposer": genesis.proposer,
"timestamp": genesis.timestamp.isoformat(),
"tx_count": genesis.tx_count,
"state_root": genesis.state_root,
}
)"""
)

with open("/home/oib/windsurf/aitbc/apps/blockchain-node/src/aitbc_chain/consensus/poa.py", "w") as f:
    f.write(content)