Compare commits
19 Commits
b61843c870
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3e01754b36 | ||
|
|
da05c5f50f | ||
|
|
bc0e17cf73 | ||
|
|
88db347df8 | ||
|
|
ca7da25b9d | ||
|
|
96fe4ca9af | ||
|
|
4d54414f0b | ||
|
|
f57a8b2cc2 | ||
|
|
5c09774e06 | ||
|
|
9bf38e1662 | ||
|
|
86baaba44f | ||
|
|
89d1613bd8 | ||
|
|
40ddf89b9c | ||
|
|
ef4a1c0e87 | ||
|
|
18264f6acd | ||
|
|
acbe68ef42 | ||
|
|
346f2d340d | ||
|
|
7035f09a8c | ||
|
|
08f3253e4e |
@@ -1,50 +0,0 @@
|
|||||||
consensus:started:1775124269
|
|
||||||
consensus:failed:1775124272
|
|
||||||
network:started:1775124272
|
|
||||||
network:failed:1775124272
|
|
||||||
economics:started:1775124272
|
|
||||||
economics:failed:1775124272
|
|
||||||
agents:started:1775124272
|
|
||||||
agents:failed:1775124272
|
|
||||||
contracts:started:1775124272
|
|
||||||
contracts:failed:1775124272
|
|
||||||
consensus:started:1775124349
|
|
||||||
consensus:failed:1775124351
|
|
||||||
network:started:1775124351
|
|
||||||
network:completed:1775124352
|
|
||||||
economics:started:1775124353
|
|
||||||
economics:failed:1775124354
|
|
||||||
agents:started:1775124354
|
|
||||||
agents:failed:1775124354
|
|
||||||
contracts:started:1775124354
|
|
||||||
contracts:failed:1775124355
|
|
||||||
consensus:started:1775124364
|
|
||||||
consensus:failed:1775124365
|
|
||||||
network:started:1775124365
|
|
||||||
network:completed:1775124366
|
|
||||||
economics:started:1775124366
|
|
||||||
economics:failed:1775124368
|
|
||||||
agents:started:1775124368
|
|
||||||
agents:failed:1775124368
|
|
||||||
contracts:started:1775124368
|
|
||||||
contracts:failed:1775124369
|
|
||||||
consensus:started:1775124518
|
|
||||||
consensus:failed:1775124520
|
|
||||||
network:started:1775124520
|
|
||||||
network:completed:1775124521
|
|
||||||
economics:started:1775124521
|
|
||||||
economics:failed:1775124522
|
|
||||||
agents:started:1775124522
|
|
||||||
agents:failed:1775124522
|
|
||||||
contracts:started:1775124522
|
|
||||||
contracts:failed:1775124524
|
|
||||||
consensus:started:1775124560
|
|
||||||
consensus:failed:1775124561
|
|
||||||
network:started:1775124561
|
|
||||||
network:completed:1775124563
|
|
||||||
economics:started:1775124563
|
|
||||||
economics:failed:1775124564
|
|
||||||
agents:started:1775124564
|
|
||||||
agents:failed:1775124564
|
|
||||||
contracts:started:1775124564
|
|
||||||
contracts:failed:1775124566
|
|
||||||
106
.gitignore
vendored
106
.gitignore
vendored
@@ -1,11 +1,13 @@
|
|||||||
# AITBC Monorepo ignore rules
|
# AITBC Monorepo ignore rules
|
||||||
# Updated: 2026-03-18 - Security fixes for hardcoded passwords
|
# Updated: 2026-04-02 - Project reorganization and security fixes
|
||||||
# Development files organized into dev/ subdirectories
|
# Development files organized into subdirectories
|
||||||
|
|
||||||
# ===================
|
# ===================
|
||||||
# Python
|
# Python
|
||||||
# ===================
|
# ===================
|
||||||
__pycache__/
|
__pycache__/
|
||||||
|
*/__pycache__/
|
||||||
|
**/__pycache__/
|
||||||
*.pyc
|
*.pyc
|
||||||
*.pyo
|
*.pyo
|
||||||
*.pyd
|
*.pyd
|
||||||
@@ -105,14 +107,42 @@ target/
|
|||||||
*.dylib
|
*.dylib
|
||||||
|
|
||||||
# ===================
|
# ===================
|
||||||
# Secrets & Credentials (CRITICAL SECURITY)
|
# Node.js & npm
|
||||||
# ===================
|
|
||||||
# ===================
|
# ===================
|
||||||
node_modules/
|
node_modules/
|
||||||
npm-debug.log*
|
npm-debug.log*
|
||||||
yarn-debug.log*
|
yarn-debug.log*
|
||||||
yarn-error.log*
|
yarn-error.log*
|
||||||
|
|
||||||
|
# ===================
|
||||||
|
# Project Configuration (moved to project-config/)
|
||||||
|
# ===================
|
||||||
|
project-config/.deployment_progress
|
||||||
|
project-config/.last_backup
|
||||||
|
project-config/=*
|
||||||
|
# requirements.txt, pyproject.toml, and poetry.lock are now at root level
|
||||||
|
|
||||||
|
# ===================
|
||||||
|
# Documentation (moved to docs/)
|
||||||
|
# ===================
|
||||||
|
docs/AITBC1_*.md
|
||||||
|
docs/PYTHON_VERSION_STATUS.md
|
||||||
|
docs/SETUP.md
|
||||||
|
docs/README_DOCUMENTATION.md
|
||||||
|
|
||||||
|
# ===================
|
||||||
|
# Security Reports (moved to security/)
|
||||||
|
# ===================
|
||||||
|
security/SECURITY_*.md
|
||||||
|
|
||||||
|
# ===================
|
||||||
|
# Backup Configuration (moved to backup-config/)
|
||||||
|
# ===================
|
||||||
|
backup-config/*.backup
|
||||||
|
|
||||||
|
# ===================
|
||||||
|
# Secrets & Credentials (CRITICAL SECURITY)
|
||||||
|
# ===================
|
||||||
# Password files (NEVER commit these)
|
# Password files (NEVER commit these)
|
||||||
*.password
|
*.password
|
||||||
*.pass
|
*.pass
|
||||||
@@ -129,6 +159,9 @@ private_key.*
|
|||||||
# ===================
|
# ===================
|
||||||
# Backup Files (organized)
|
# Backup Files (organized)
|
||||||
# ===================
|
# ===================
|
||||||
|
backups/
|
||||||
|
backups/*
|
||||||
|
backups/**/*
|
||||||
backup/**/*.tmp
|
backup/**/*.tmp
|
||||||
backup/**/*.temp
|
backup/**/*.temp
|
||||||
backup/**/.DS_Store
|
backup/**/.DS_Store
|
||||||
@@ -167,7 +200,8 @@ temp/
|
|||||||
# ===================
|
# ===================
|
||||||
# Wallet Files (contain private keys)
|
# Wallet Files (contain private keys)
|
||||||
# ===================
|
# ===================
|
||||||
# Specific wallet and private key JSON files (contain private keys)
|
wallet*.json
|
||||||
|
|
||||||
# ===================
|
# ===================
|
||||||
# Project Specific
|
# Project Specific
|
||||||
# ===================
|
# ===================
|
||||||
@@ -184,6 +218,7 @@ apps/explorer-web/dist/
|
|||||||
packages/solidity/aitbc-token/typechain-types/
|
packages/solidity/aitbc-token/typechain-types/
|
||||||
packages/solidity/aitbc-token/artifacts/
|
packages/solidity/aitbc-token/artifacts/
|
||||||
packages/solidity/aitbc-token/cache/
|
packages/solidity/aitbc-token/cache/
|
||||||
|
packages/solidity/aitbc-token/node_modules/
|
||||||
|
|
||||||
# Local test fixtures and E2E testing
|
# Local test fixtures and E2E testing
|
||||||
tests/e2e/fixtures/home/**/.aitbc/cache/
|
tests/e2e/fixtures/home/**/.aitbc/cache/
|
||||||
@@ -202,6 +237,7 @@ tests/e2e/fixtures/home/**/.aitbc/*.sock
|
|||||||
|
|
||||||
# Local test data
|
# Local test data
|
||||||
tests/fixtures/generated/
|
tests/fixtures/generated/
|
||||||
|
tests/__pycache__/
|
||||||
|
|
||||||
# GPU miner local configs
|
# GPU miner local configs
|
||||||
scripts/gpu/*.local.py
|
scripts/gpu/*.local.py
|
||||||
@@ -222,8 +258,8 @@ docs/1_project/4_currentissue.md
|
|||||||
# ===================
|
# ===================
|
||||||
# Website (local deployment details)
|
# Website (local deployment details)
|
||||||
# ===================
|
# ===================
|
||||||
website/README.md
|
website/README.md.example
|
||||||
website/aitbc-proxy.conf
|
website/aitbc-proxy.conf.example
|
||||||
|
|
||||||
# ===================
|
# ===================
|
||||||
# Local Config & Secrets
|
# Local Config & Secrets
|
||||||
@@ -248,31 +284,14 @@ infra/helm/values/prod/
|
|||||||
infra/helm/values/prod.yaml
|
infra/helm/values/prod.yaml
|
||||||
|
|
||||||
# ===================
|
# ===================
|
||||||
# Node.js
|
|
||||||
# ===================
|
|
||||||
node_modules/
|
|
||||||
npm-debug.log*
|
|
||||||
yarn-debug.log*
|
|
||||||
yarn-error.log*
|
|
||||||
|
|
||||||
# Build artifacts
|
|
||||||
build/
|
|
||||||
dist/
|
|
||||||
target/
|
|
||||||
|
|
||||||
# System files
|
|
||||||
*.pid
|
|
||||||
*.seed
|
|
||||||
*.pid.lock
|
|
||||||
|
|
||||||
# Coverage reports
|
# Coverage reports
|
||||||
|
# ===================
|
||||||
htmlcov/
|
htmlcov/
|
||||||
.coverage
|
.coverage
|
||||||
.coverage.*
|
.coverage.*
|
||||||
coverage.xml
|
coverage.xml
|
||||||
*.cover
|
*.cover
|
||||||
.hypothesis/
|
.hypothesis/
|
||||||
.pytest_cache/
|
|
||||||
|
|
||||||
# Jupyter Notebook
|
# Jupyter Notebook
|
||||||
.ipynb_checkpoints
|
.ipynb_checkpoints
|
||||||
@@ -280,56 +299,31 @@ coverage.xml
|
|||||||
# pyenv
|
# pyenv
|
||||||
.python-version
|
.python-version
|
||||||
|
|
||||||
# Environments
|
|
||||||
.venv
|
|
||||||
env/
|
|
||||||
venv/
|
|
||||||
ENV/
|
|
||||||
env.bak/
|
|
||||||
venv.bak/
|
|
||||||
|
|
||||||
# ===================
|
# ===================
|
||||||
# AITBC specific (CRITICAL SECURITY)
|
# AITBC specific (CRITICAL SECURITY)
|
||||||
# ===================
|
# ===================
|
||||||
data/
|
|
||||||
logs/
|
|
||||||
*.db
|
|
||||||
*.sqlite
|
|
||||||
wallet*.json
|
|
||||||
certificates/
|
certificates/
|
||||||
|
|
||||||
# Guardian contract databases (contain spending limits)
|
|
||||||
guardian_contracts/
|
guardian_contracts/
|
||||||
*.guardian.db
|
*.guardian.db
|
||||||
|
|
||||||
# Multi-chain wallet data
|
|
||||||
.wallets/
|
.wallets/
|
||||||
.wallets/*
|
.wallets/*
|
||||||
|
|
||||||
# Agent protocol data
|
|
||||||
.agent_data/
|
.agent_data/
|
||||||
.agent_data/*
|
.agent_data/*
|
||||||
|
|
||||||
# Operational and setup files
|
|
||||||
results/
|
results/
|
||||||
tools/
|
tools/
|
||||||
data/
|
|
||||||
*.db
|
|
||||||
*.log
|
|
||||||
production/data/
|
production/data/
|
||||||
production/logs/
|
production/logs/
|
||||||
config/
|
config/
|
||||||
*.env
|
|
||||||
api_keys.txt
|
api_keys.txt
|
||||||
*.yaml
|
*.yaml
|
||||||
!*.example
|
!*.example
|
||||||
logs/
|
|
||||||
production/logs/
|
|
||||||
*.log
|
|
||||||
*.log.*
|
|
||||||
production/data/
|
|
||||||
production/logs/
|
|
||||||
dev/cache/logs/
|
dev/cache/logs/
|
||||||
dev/test-nodes/*/data/
|
dev/test-nodes/*/data/
|
||||||
backups/*/config/
|
backups/*/config/
|
||||||
backups/*/logs/
|
backups/*/logs/
|
||||||
|
|
||||||
|
# ===================
|
||||||
|
# Monitoring & Systemd
|
||||||
|
# ===================
|
||||||
|
monitoring/*.pid
|
||||||
|
systemd/*.backup
|
||||||
|
|||||||
@@ -1 +0,0 @@
|
|||||||
/opt/aitbc/backups/pre_deployment_20260402_120920
|
|
||||||
@@ -189,7 +189,7 @@ sudo systemctl start aitbc-blockchain-node-production.service
|
|||||||
**Quick Start**:
|
**Quick Start**:
|
||||||
```bash
|
```bash
|
||||||
# Create marketplace service
|
# Create marketplace service
|
||||||
./aitbc-cli marketplace --action create --name "AI Service" --price 100 --wallet provider
|
./aitbc-cli market create --type ai-inference --price 100 --description "AI Service" --wallet provider
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
@@ -297,10 +297,10 @@ curl -s http://localhost:8006/health | jq .
|
|||||||
curl -s http://localhost:8006/rpc/head | jq .height
|
curl -s http://localhost:8006/rpc/head | jq .height
|
||||||
|
|
||||||
# List wallets
|
# List wallets
|
||||||
./aitbc-cli list
|
./aitbc-cli wallet list
|
||||||
|
|
||||||
# Send transaction
|
# Send transaction
|
||||||
./aitbc-cli send --from wallet1 --to wallet2 --amount 100 --password 123
|
./aitbc-cli wallet send wallet1 wallet2 100 123
|
||||||
```
|
```
|
||||||
|
|
||||||
### Operations Commands (From Operations Module)
|
### Operations Commands (From Operations Module)
|
||||||
@@ -342,10 +342,10 @@ curl -s http://localhost:9090/metrics
|
|||||||
### Marketplace Commands (From Marketplace Module)
|
### Marketplace Commands (From Marketplace Module)
|
||||||
```bash
|
```bash
|
||||||
# Create service
|
# Create service
|
||||||
./aitbc-cli marketplace --action create --name "Service" --price 100 --wallet provider
|
./aitbc-cli market create --type ai-inference --price 100 --description "Service" --wallet provider
|
||||||
|
|
||||||
# Submit AI job
|
# Submit AI job
|
||||||
./aitbc-cli ai-submit --wallet wallet --type inference --prompt "Generate image" --payment 100
|
./aitbc-cli ai submit --wallet wallet --type inference --prompt "Generate image" --payment 100
|
||||||
|
|
||||||
# Check resource status
|
# Check resource status
|
||||||
./aitbc-cli resource status
|
./aitbc-cli resource status
|
||||||
|
|||||||
@@ -95,8 +95,8 @@ openclaw agent --agent FollowerAgent --session-id test --message "Test response"
|
|||||||
**Quick Start**:
|
**Quick Start**:
|
||||||
```bash
|
```bash
|
||||||
# Test AI operations
|
# Test AI operations
|
||||||
./aitbc-cli ai-submit --wallet genesis-ops --type inference --prompt "Test AI job" --payment 100
|
./aitbc-cli ai submit --wallet genesis-ops --type inference --prompt "Test AI job" --payment 100
|
||||||
./aitbc-cli ai-ops --action status --job-id latest
|
./aitbc-cli ai status --job-id latest
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
@@ -117,8 +117,8 @@ openclaw agent --agent FollowerAgent --session-id test --message "Test response"
|
|||||||
**Quick Start**:
|
**Quick Start**:
|
||||||
```bash
|
```bash
|
||||||
# Test advanced AI operations
|
# Test advanced AI operations
|
||||||
./aitbc-cli ai-submit --wallet genesis-ops --type parallel --prompt "Complex pipeline test" --payment 500
|
./aitbc-cli ai submit --wallet genesis-ops --type parallel --prompt "Complex pipeline test" --payment 500
|
||||||
./aitbc-cli ai-submit --wallet genesis-ops --type multimodal --prompt "Multi-modal test" --payment 1000
|
./aitbc-cli ai submit --wallet genesis-ops --type multimodal --prompt "Multi-modal test" --payment 1000
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
@@ -139,7 +139,7 @@ openclaw agent --agent FollowerAgent --session-id test --message "Test response"
|
|||||||
**Quick Start**:
|
**Quick Start**:
|
||||||
```bash
|
```bash
|
||||||
# Test cross-node operations
|
# Test cross-node operations
|
||||||
ssh aitbc1 'cd /opt/aitbc && ./aitbc-cli chain'
|
ssh aitbc1 'cd /opt/aitbc && ./aitbc-cli blockchain info'
|
||||||
./aitbc-cli resource status
|
./aitbc-cli resource status
|
||||||
ssh aitbc1 'cd /opt/aitbc && ./aitbc-cli resource status'
|
ssh aitbc1 'cd /opt/aitbc && ./aitbc-cli resource status'
|
||||||
```
|
```
|
||||||
@@ -223,16 +223,16 @@ test-basic.md (foundation)
|
|||||||
### 🚀 Quick Test Commands
|
### 🚀 Quick Test Commands
|
||||||
```bash
|
```bash
|
||||||
# Basic functionality test
|
# Basic functionality test
|
||||||
./aitbc-cli --version && ./aitbc-cli chain
|
./aitbc-cli --version && ./aitbc-cli blockchain info
|
||||||
|
|
||||||
# OpenClaw agent test
|
# OpenClaw agent test
|
||||||
openclaw agent --agent GenesisAgent --session-id quick-test --message "Quick test" --thinking low
|
openclaw agent --agent GenesisAgent --session-id quick-test --message "Quick test" --thinking low
|
||||||
|
|
||||||
# AI operations test
|
# AI operations test
|
||||||
./aitbc-cli ai-submit --wallet genesis-ops --type inference --prompt "Quick test" --payment 50
|
./aitbc-cli ai submit --wallet genesis-ops --type inference --prompt "Quick test" --payment 50
|
||||||
|
|
||||||
# Cross-node test
|
# Cross-node test
|
||||||
ssh aitbc1 'cd /opt/aitbc && ./aitbc-cli chain'
|
ssh aitbc1 'cd /opt/aitbc && ./aitbc-cli blockchain info'
|
||||||
|
|
||||||
# Performance test
|
# Performance test
|
||||||
./aitbc-cli simulate blockchain --blocks 10 --transactions 50 --delay 0
|
./aitbc-cli simulate blockchain --blocks 10 --transactions 50 --delay 0
|
||||||
|
|||||||
@@ -25,77 +25,69 @@ This module covers marketplace scenario testing, GPU provider testing, transacti
|
|||||||
cd /opt/aitbc && source venv/bin/activate
|
cd /opt/aitbc && source venv/bin/activate
|
||||||
|
|
||||||
# Create marketplace service provider wallet
|
# Create marketplace service provider wallet
|
||||||
./aitbc-cli create --name marketplace-provider --password 123
|
./aitbc-cli wallet create marketplace-provider 123
|
||||||
|
|
||||||
# Fund marketplace provider wallet
|
# Fund marketplace provider wallet
|
||||||
./aitbc-cli send --from genesis-ops --to $(./aitbc-cli list | grep "marketplace-provider:" | cut -d" " -f2) --amount 10000 --password 123
|
./aitbc-cli wallet send genesis-ops $(./aitbc-cli wallet list | grep "marketplace-provider:" | cut -d" " -f2) 10000 123
|
||||||
|
|
||||||
# Create AI service provider wallet
|
# Create AI service provider wallet
|
||||||
./aitbc-cli create --name ai-service-provider --password 123
|
./aitbc-cli wallet create ai-service-provider 123
|
||||||
|
|
||||||
# Fund AI service provider wallet
|
# Fund AI service provider wallet
|
||||||
./aitbc-cli send --from genesis-ops --to $(./aitbc-cli list | grep "ai-service-provider:" | cut -d" " -f2) --amount 5000 --password 123
|
./aitbc-cli wallet send genesis-ops $(./aitbc-cli wallet list | grep "ai-service-provider:" | cut -d" " -f2) 5000 123
|
||||||
|
|
||||||
# Create GPU provider wallet
|
# Create GPU provider wallet
|
||||||
./aitbc-cli create --name gpu-provider --password 123
|
./aitbc-cli wallet create gpu-provider 123
|
||||||
|
|
||||||
# Fund GPU provider wallet
|
# Fund GPU provider wallet
|
||||||
./aitbc-cli send --from genesis-ops --to $(./aitbc-cli list | grep "gpu-provider:" | cut -d" " -f2) --amount 5000 --password 123
|
./aitbc-cli wallet send genesis-ops $(./aitbc-cli wallet list | grep "gpu-provider:" | cut -d" " -f2) 5000 123
|
||||||
```
|
```
|
||||||
|
|
||||||
### Create Marketplace Services
|
### Create Marketplace Services
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Create AI inference service
|
# Create AI inference service
|
||||||
./aitbc-cli marketplace --action create \
|
./aitbc-cli market create \
|
||||||
--name "AI Image Generation Service" \
|
|
||||||
--type ai-inference \
|
--type ai-inference \
|
||||||
--price 100 \
|
--price 100 \
|
||||||
--wallet marketplace-provider \
|
--wallet marketplace-provider \
|
||||||
--description "High-quality image generation using advanced AI models" \
|
--description "High-quality image generation using advanced AI models"
|
||||||
--parameters "resolution:512x512,style:photorealistic,quality:high"
|
|
||||||
|
|
||||||
# Create AI training service
|
# Create AI training service
|
||||||
./aitbc-cli marketplace --action create \
|
./aitbc-cli market create \
|
||||||
--name "Custom Model Training Service" \
|
|
||||||
--type ai-training \
|
--type ai-training \
|
||||||
--price 500 \
|
--price 500 \
|
||||||
--wallet ai-service-provider \
|
--wallet ai-service-provider \
|
||||||
--description "Custom AI model training on your datasets" \
|
--description "Custom AI model training on your datasets"
|
||||||
--parameters "model_type:custom,epochs:100,batch_size:32"
|
|
||||||
|
|
||||||
# Create GPU rental service
|
# Create GPU rental service
|
||||||
./aitbc-cli marketplace --action create \
|
./aitbc-cli market create \
|
||||||
--name "GPU Cloud Computing" \
|
|
||||||
--type gpu-rental \
|
--type gpu-rental \
|
||||||
--price 50 \
|
--price 50 \
|
||||||
--wallet gpu-provider \
|
--wallet gpu-provider \
|
||||||
--description "High-performance GPU rental for AI workloads" \
|
--description "High-performance GPU rental for AI workloads"
|
||||||
--parameters "gpu_type:rtx4090,memory:24gb,bandwidth:high"
|
|
||||||
|
|
||||||
# Create data processing service
|
# Create data processing service
|
||||||
./aitbc-cli marketplace --action create \
|
./aitbc-cli market create \
|
||||||
--name "Data Analysis Pipeline" \
|
|
||||||
--type data-processing \
|
--type data-processing \
|
||||||
--price 25 \
|
--price 25 \
|
||||||
--wallet marketplace-provider \
|
--wallet marketplace-provider \
|
||||||
--description "Automated data analysis and processing" \
|
--description "Automated data analysis and processing"
|
||||||
--parameters "data_format:csv,json,xml,output_format:reports"
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Verify Marketplace Services
|
### Verify Marketplace Services
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# List all marketplace services
|
# List all marketplace services
|
||||||
./aitbc-cli marketplace --action list
|
./aitbc-cli market list
|
||||||
|
|
||||||
# Check service details
|
# Check service details
|
||||||
./aitbc-cli marketplace --action search --query "AI"
|
./aitbc-cli market search --query "AI"
|
||||||
|
|
||||||
# Verify provider listings
|
# Verify provider listings
|
||||||
./aitbc-cli marketplace --action my-listings --wallet marketplace-provider
|
./aitbc-cli market my-listings --wallet marketplace-provider
|
||||||
./aitbc-cli marketplace --action my-listings --wallet ai-service-provider
|
./aitbc-cli market my-listings --wallet ai-service-provider
|
||||||
./aitbc-cli marketplace --action my-listings --wallet gpu-provider
|
./aitbc-cli market my-listings --wallet gpu-provider
|
||||||
```
|
```
|
||||||
|
|
||||||
## Scenario Testing
|
## Scenario Testing
|
||||||
@@ -104,88 +96,88 @@ cd /opt/aitbc && source venv/bin/activate
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Customer creates wallet and funds it
|
# Customer creates wallet and funds it
|
||||||
./aitbc-cli create --name customer-1 --password 123
|
./aitbc-cli wallet create customer-1 123
|
||||||
./aitbc-cli send --from genesis-ops --to $(./aitbc-cli list | grep "customer-1:" | cut -d" " -f2) --amount 1000 --password 123
|
./aitbc-cli wallet send genesis-ops $(./aitbc-cli wallet list | grep "customer-1:" | cut -d" " -f2) 1000 123
|
||||||
|
|
||||||
# Customer browses marketplace
|
# Customer browses marketplace
|
||||||
./aitbc-cli marketplace --action search --query "image generation"
|
./aitbc-cli market search --query "image generation"
|
||||||
|
|
||||||
# Customer bids on AI image generation service
|
# Customer bids on AI image generation service
|
||||||
SERVICE_ID=$(./aitbc-cli marketplace --action search --query "AI Image Generation" | grep "service_id" | head -1 | cut -d" " -f2)
|
SERVICE_ID=$(./aitbc-cli market search --query "AI Image Generation" | grep "service_id" | head -1 | cut -d" " -f2)
|
||||||
./aitbc-cli marketplace --action bid --service-id $SERVICE_ID --amount 120 --wallet customer-1
|
./aitbc-cli market bid --service-id $SERVICE_ID --amount 120 --wallet customer-1
|
||||||
|
|
||||||
# Service provider accepts bid
|
# Service provider accepts bid
|
||||||
./aitbc-cli marketplace --action accept-bid --service-id $SERVICE_ID --bid-id "bid_123" --wallet marketplace-provider
|
./aitbc-cli market accept-bid --service-id $SERVICE_ID --bid-id "bid_123" --wallet marketplace-provider
|
||||||
|
|
||||||
# Customer submits AI job
|
# Customer submits AI job
|
||||||
./aitbc-cli ai-submit --wallet customer-1 --type inference \
|
./aitbc-cli ai submit --wallet customer-1 --type inference \
|
||||||
--prompt "Generate a futuristic cityscape with flying cars" \
|
--prompt "Generate a futuristic cityscape with flying cars" \
|
||||||
--payment 120 --service-id $SERVICE_ID
|
--payment 120 --service-id $SERVICE_ID
|
||||||
|
|
||||||
# Monitor job completion
|
# Monitor job completion
|
||||||
./aitbc-cli ai-status --job-id "ai_job_123"
|
./aitbc-cli ai status --job-id "ai_job_123"
|
||||||
|
|
||||||
# Customer receives results
|
# Customer receives results
|
||||||
./aitbc-cli ai-results --job-id "ai_job_123"
|
./aitbc-cli ai results --job-id "ai_job_123"
|
||||||
|
|
||||||
# Verify transaction completed
|
# Verify transaction completed
|
||||||
./aitbc-cli balance --name customer-1
|
./aitbc-cli wallet balance customer-1
|
||||||
./aitbc-cli balance --name marketplace-provider
|
./aitbc-cli wallet balance marketplace-provider
|
||||||
```
|
```
|
||||||
|
|
||||||
### Scenario 2: GPU Rental + AI Training
|
### Scenario 2: GPU Rental + AI Training
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Researcher creates wallet and funds it
|
# Researcher creates wallet and funds it
|
||||||
./aitbc-cli create --name researcher-1 --password 123
|
./aitbc-cli wallet create researcher-1 123
|
||||||
./aitbc-cli send --from genesis-ops --to $(./aitbc-cli list | grep "researcher-1:" | cut -d" " -f2) --amount 2000 --password 123
|
./aitbc-cli wallet send genesis-ops $(./aitbc-cli wallet list | grep "researcher-1:" | cut -d" " -f2) 2000 123
|
||||||
|
|
||||||
# Researcher rents GPU for training
|
# Researcher rents GPU for training
|
||||||
GPU_SERVICE_ID=$(./aitbc-cli marketplace --action search --query "GPU" | grep "service_id" | head -1 | cut -d" " -f2)
|
GPU_SERVICE_ID=$(./aitbc-cli market search --query "GPU" | grep "service_id" | head -1 | cut -d" " -f2)
|
||||||
./aitbc-cli marketplace --action bid --service-id $GPU_SERVICE_ID --amount 60 --wallet researcher-1
|
./aitbc-cli market bid --service-id $GPU_SERVICE_ID --amount 60 --wallet researcher-1
|
||||||
|
|
||||||
# GPU provider accepts and allocates GPU
|
# GPU provider accepts and allocates GPU
|
||||||
./aitbc-cli marketplace --action accept-bid --service-id $GPU_SERVICE_ID --bid-id "bid_456" --wallet gpu-provider
|
./aitbc-cli market accept-bid --service-id $GPU_SERVICE_ID --bid-id "bid_456" --wallet gpu-provider
|
||||||
|
|
||||||
# Researcher submits training job with allocated GPU
|
# Researcher submits training job with allocated GPU
|
||||||
./aitbc-cli ai-submit --wallet researcher-1 --type training \
|
./aitbc-cli ai submit --wallet researcher-1 --type training \
|
||||||
--model "custom-classifier" --dataset "/data/training_data.csv" \
|
--model "custom-classifier" --dataset "/data/training_data.csv" \
|
||||||
--payment 500 --gpu-allocated 1 --memory 8192
|
--payment 500 --gpu-allocated 1 --memory 8192
|
||||||
|
|
||||||
# Monitor training progress
|
# Monitor training progress
|
||||||
./aitbc-cli ai-status --job-id "ai_job_456"
|
./aitbc-cli ai status --job-id "ai_job_456"
|
||||||
|
|
||||||
# Verify GPU utilization
|
# Verify GPU utilization
|
||||||
./aitbc-cli resource status --agent-id "gpu-worker-1"
|
./aitbc-cli resource status --agent-id "gpu-worker-1"
|
||||||
|
|
||||||
# Training completes and researcher gets model
|
# Training completes and researcher gets model
|
||||||
./aitbc-cli ai-results --job-id "ai_job_456"
|
./aitbc-cli ai results --job-id "ai_job_456"
|
||||||
```
|
```
|
||||||
|
|
||||||
### Scenario 3: Multi-Service Pipeline
|
### Scenario 3: Multi-Service Pipeline
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Enterprise creates wallet and funds it
|
# Enterprise creates wallet and funds it
|
||||||
./aitbc-cli create --name enterprise-1 --password 123
|
./aitbc-cli wallet create enterprise-1 123
|
||||||
./aitbc-cli send --from genesis-ops --to $(./aitbc-cli list | grep "enterprise-1:" | cut -d" " -f2) --amount 5000 --password 123
|
./aitbc-cli wallet send genesis-ops $(./aitbc-cli wallet list | grep "enterprise-1:" | cut -d" " -f2) 5000 123
|
||||||
|
|
||||||
# Enterprise creates data processing pipeline
|
# Enterprise creates data processing pipeline
|
||||||
DATA_SERVICE_ID=$(./aitbc-cli marketplace --action search --query "data processing" | grep "service_id" | head -1 | cut -d" " -f2)
|
DATA_SERVICE_ID=$(./aitbc-cli market search --query "data processing" | grep "service_id" | head -1 | cut -d" " -f2)
|
||||||
./aitbc-cli marketplace --action bid --service-id $DATA_SERVICE_ID --amount 30 --wallet enterprise-1
|
./aitbc-cli market bid --service-id $DATA_SERVICE_ID --amount 30 --wallet enterprise-1
|
||||||
|
|
||||||
# Data provider processes raw data
|
# Data provider processes raw data
|
||||||
./aitbc-cli marketplace --action accept-bid --service-id $DATA_SERVICE_ID --bid-id "bid_789" --wallet marketplace-provider
|
./aitbc-cli market accept-bid --service-id $DATA_SERVICE_ID --bid-id "bid_789" --wallet marketplace-provider
|
||||||
|
|
||||||
# Enterprise submits AI analysis on processed data
|
# Enterprise submits AI analysis on processed data
|
||||||
./aitbc-cli ai-submit --wallet enterprise-1 --type inference \
|
./aitbc-cli ai submit --wallet enterprise-1 --type inference \
|
||||||
--prompt "Analyze processed data for trends and patterns" \
|
--prompt "Analyze processed data for trends and patterns" \
|
||||||
--payment 200 --input-data "/data/processed_data.csv"
|
--payment 200 --input-data "/data/processed_data.csv"
|
||||||
|
|
||||||
# Results are delivered and verified
|
# Results are delivered and verified
|
||||||
./aitbc-cli ai-results --job-id "ai_job_789"
|
./aitbc-cli ai results --job-id "ai_job_789"
|
||||||
|
|
||||||
# Enterprise pays for services
|
# Enterprise pays for services
|
||||||
./aitbc-cli marketplace --action settle-payment --service-id $DATA_SERVICE_ID --amount 30 --wallet enterprise-1
|
./aitbc-cli market settle-payment --service-id $DATA_SERVICE_ID --amount 30 --wallet enterprise-1
|
||||||
```
|
```
|
||||||
|
|
||||||
## GPU Provider Testing
|
## GPU Provider Testing
|
||||||
@@ -194,7 +186,7 @@ DATA_SERVICE_ID=$(./aitbc-cli marketplace --action search --query "data processi
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Test GPU allocation and deallocation
|
# Test GPU allocation and deallocation
|
||||||
./aitbc-cli resource allocate --agent-id "gpu-worker-1" --gpu 1 --memory 8192 --duration 3600
|
./aitbc-cli resource allocate --agent-id "gpu-worker-1" --memory 8192 --duration 3600
|
||||||
|
|
||||||
# Verify GPU allocation
|
# Verify GPU allocation
|
||||||
./aitbc-cli resource status --agent-id "gpu-worker-1"
|
./aitbc-cli resource status --agent-id "gpu-worker-1"
|
||||||
@@ -207,7 +199,7 @@ DATA_SERVICE_ID=$(./aitbc-cli marketplace --action search --query "data processi
|
|||||||
|
|
||||||
# Test concurrent GPU allocations
|
# Test concurrent GPU allocations
|
||||||
for i in {1..5}; do
|
for i in {1..5}; do
|
||||||
./aitbc-cli resource allocate --agent-id "gpu-worker-$i" --gpu 1 --memory 8192 --duration 1800 &
|
./aitbc-cli resource allocate --agent-id "gpu-worker-$i" --memory 8192 --duration 1800 &
|
||||||
done
|
done
|
||||||
wait
|
wait
|
||||||
|
|
||||||
@@ -219,16 +211,16 @@ wait
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Test GPU performance with different workloads
|
# Test GPU performance with different workloads
|
||||||
./aitbc-cli ai-submit --wallet gpu-provider --type inference \
|
./aitbc-cli ai submit --wallet gpu-provider --type inference \
|
||||||
--prompt "Generate high-resolution image" --payment 100 \
|
--prompt "Generate high-resolution image" --payment 100 \
|
||||||
--gpu-allocated 1 --resolution "1024x1024"
|
--gpu-allocated 1 --resolution "1024x1024"
|
||||||
|
|
||||||
./aitbc-cli ai-submit --wallet gpu-provider --type training \
|
./aitbc-cli ai submit --wallet gpu-provider --type training \
|
||||||
--model "large-model" --dataset "/data/large_dataset.csv" --payment 500 \
|
--model "large-model" --dataset "/data/large_dataset.csv" --payment 500 \
|
||||||
--gpu-allocated 1 --batch-size 64
|
--gpu-allocated 1 --batch-size 64
|
||||||
|
|
||||||
# Monitor GPU performance metrics
|
# Monitor GPU performance metrics
|
||||||
./aitbc-cli ai-metrics --agent-id "gpu-worker-1" --period "1h"
|
./aitbc-cli ai metrics --agent-id "gpu-worker-1" --period "1h"
|
||||||
|
|
||||||
# Test GPU memory management
|
# Test GPU memory management
|
||||||
./aitbc-cli resource test --type gpu --memory-stress --duration 300
|
./aitbc-cli resource test --type gpu --memory-stress --duration 300
|
||||||
@@ -238,13 +230,13 @@ wait
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Test GPU provider revenue tracking
|
# Test GPU provider revenue tracking
|
||||||
./aitbc-cli marketplace --action revenue --wallet gpu-provider --period "24h"
|
./aitbc-cli market revenue --wallet gpu-provider --period "24h"
|
||||||
|
|
||||||
# Test GPU utilization optimization
|
# Test GPU utilization optimization
|
||||||
./aitbc-cli marketplace --action optimize --wallet gpu-provider --metric "utilization"
|
./aitbc-cli market optimize --wallet gpu-provider --metric "utilization"
|
||||||
|
|
||||||
# Test GPU pricing strategy
|
# Test GPU pricing strategy
|
||||||
./aitbc-cli marketplace --action pricing --service-id $GPU_SERVICE_ID --strategy "dynamic"
|
./aitbc-cli market pricing --service-id $GPU_SERVICE_ID --strategy "dynamic"
|
||||||
```
|
```
|
||||||
|
|
||||||
## Transaction Tracking
|
## Transaction Tracking
|
||||||
@@ -253,45 +245,45 @@ wait
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Monitor all marketplace transactions
|
# Monitor all marketplace transactions
|
||||||
./aitbc-cli marketplace --action transactions --period "1h"
|
./aitbc-cli market transactions --period "1h"
|
||||||
|
|
||||||
# Track specific service transactions
|
# Track specific service transactions
|
||||||
./aitbc-cli marketplace --action transactions --service-id $SERVICE_ID
|
./aitbc-cli market transactions --service-id $SERVICE_ID
|
||||||
|
|
||||||
# Monitor customer transaction history
|
# Monitor customer transaction history
|
||||||
./aitbc-cli transactions --name customer-1 --limit 50
|
./aitbc-cli wallet transactions customer-1 --limit 50
|
||||||
|
|
||||||
# Track provider revenue
|
# Track provider revenue
|
||||||
./aitbc-cli marketplace --action revenue --wallet marketplace-provider --period "24h"
|
./aitbc-cli market revenue --wallet marketplace-provider --period "24h"
|
||||||
```
|
```
|
||||||
|
|
||||||
### Transaction Verification
|
### Transaction Verification
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Verify transaction integrity
|
# Verify transaction integrity
|
||||||
./aitbc-cli transaction verify --tx-id "tx_123"
|
./aitbc-cli wallet transaction verify --tx-id "tx_123"
|
||||||
|
|
||||||
# Check transaction confirmation status
|
# Check transaction confirmation status
|
||||||
./aitbc-cli transaction status --tx-id "tx_123"
|
./aitbc-cli wallet transaction status --tx-id "tx_123"
|
||||||
|
|
||||||
# Verify marketplace settlement
|
# Verify marketplace settlement
|
||||||
./aitbc-cli marketplace --action verify-settlement --service-id $SERVICE_ID
|
./aitbc-cli market verify-settlement --service-id $SERVICE_ID
|
||||||
|
|
||||||
# Audit transaction trail
|
# Audit transaction trail
|
||||||
./aitbc-cli marketplace --action audit --period "24h"
|
./aitbc-cli market audit --period "24h"
|
||||||
```
|
```
|
||||||
|
|
||||||
### Cross-Node Transaction Tracking
|
### Cross-Node Transaction Tracking
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Monitor transactions across both nodes
|
# Monitor transactions across both nodes
|
||||||
./aitbc-cli transactions --cross-node --period "1h"
|
./aitbc-cli wallet transactions --cross-node --period "1h"
|
||||||
|
|
||||||
# Verify transaction propagation
|
# Verify transaction propagation
|
||||||
./aitbc-cli transaction verify-propagation --tx-id "tx_123"
|
./aitbc-cli wallet transaction verify-propagation --tx-id "tx_123"
|
||||||
|
|
||||||
# Track cross-node marketplace activity
|
# Track cross-node marketplace activity
|
||||||
./aitbc-cli marketplace --action cross-node-stats --period "24h"
|
./aitbc-cli market cross-node-stats --period "24h"
|
||||||
```
|
```
|
||||||
|
|
||||||
## Verification Procedures
|
## Verification Procedures
|
||||||
@@ -300,39 +292,39 @@ wait
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Verify service provider performance
|
# Verify service provider performance
|
||||||
./aitbc-cli marketplace --action verify-provider --wallet ai-service-provider
|
./aitbc-cli market verify-provider --wallet ai-service-provider
|
||||||
|
|
||||||
# Check service quality metrics
|
# Check service quality metrics
|
||||||
./aitbc-cli marketplace --action quality-metrics --service-id $SERVICE_ID
|
./aitbc-cli market quality-metrics --service-id $SERVICE_ID
|
||||||
|
|
||||||
# Verify customer satisfaction
|
# Verify customer satisfaction
|
||||||
./aitbc-cli marketplace --action satisfaction --wallet customer-1 --period "7d"
|
./aitbc-cli market satisfaction --wallet customer-1 --period "7d"
|
||||||
```
|
```
|
||||||
|
|
||||||
### Compliance Verification
|
### Compliance Verification
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Verify marketplace compliance
|
# Verify marketplace compliance
|
||||||
./aitbc-cli marketplace --action compliance-check --period "24h"
|
./aitbc-cli market compliance-check --period "24h"
|
||||||
|
|
||||||
# Check regulatory compliance
|
# Check regulatory compliance
|
||||||
./aitbc-cli marketplace --action regulatory-audit --period "30d"
|
./aitbc-cli market regulatory-audit --period "30d"
|
||||||
|
|
||||||
# Verify data privacy compliance
|
# Verify data privacy compliance
|
||||||
./aitbc-cli marketplace --action privacy-audit --service-id $SERVICE_ID
|
./aitbc-cli market privacy-audit --service-id $SERVICE_ID
|
||||||
```
|
```
|
||||||
|
|
||||||
### Financial Verification
|
### Financial Verification
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Verify financial transactions
|
# Verify financial transactions
|
||||||
./aitbc-cli marketplace --action financial-audit --period "24h"
|
./aitbc-cli market financial-audit --period "24h"
|
||||||
|
|
||||||
# Check payment processing
|
# Check payment processing
|
||||||
./aitbc-cli marketplace --action payment-verify --period "1h"
|
./aitbc-cli market payment-verify --period "1h"
|
||||||
|
|
||||||
# Reconcile marketplace accounts
|
# Reconcile marketplace accounts
|
||||||
./aitbc-cli marketplace --action reconcile --period "24h"
|
./aitbc-cli market reconcile --period "24h"
|
||||||
```
|
```
|
||||||
|
|
||||||
## Performance Testing
|
## Performance Testing
|
||||||
@@ -342,41 +334,41 @@ wait
|
|||||||
```bash
|
```bash
|
||||||
# Simulate high transaction volume
|
# Simulate high transaction volume
|
||||||
for i in {1..100}; do
|
for i in {1..100}; do
|
||||||
./aitbc-cli marketplace --action bid --service-id $SERVICE_ID --amount 100 --wallet test-wallet-$i &
|
./aitbc-cli market bid --service-id $SERVICE_ID --amount 100 --wallet test-wallet-$i &
|
||||||
done
|
done
|
||||||
wait
|
wait
|
||||||
|
|
||||||
# Monitor system performance under load
|
# Monitor system performance under load
|
||||||
./aitbc-cli marketplace --action performance-metrics --period "5m"
|
./aitbc-cli market performance-metrics --period "5m"
|
||||||
|
|
||||||
# Test marketplace scalability
|
# Test marketplace scalability
|
||||||
./aitbc-cli marketplace --action stress-test --transactions 1000 --concurrent 50
|
./aitbc-cli market stress-test --transactions 1000 --concurrent 50
|
||||||
```
|
```
|
||||||
|
|
||||||
### Latency Testing
|
### Latency Testing
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Test transaction processing latency
|
# Test transaction processing latency
|
||||||
time ./aitbc-cli marketplace --action bid --service-id $SERVICE_ID --amount 100 --wallet test-wallet
|
time ./aitbc-cli market bid --service-id $SERVICE_ID --amount 100 --wallet test-wallet
|
||||||
|
|
||||||
# Test AI job submission latency
|
# Test AI job submission latency
|
||||||
time ./aitbc-cli ai-submit --wallet test-wallet --type inference --prompt "test" --payment 50
|
time ./aitbc-cli ai submit --wallet test-wallet --type inference --prompt "test" --payment 50
|
||||||
|
|
||||||
# Monitor overall system latency
|
# Monitor overall system latency
|
||||||
./aitbc-cli marketplace --action latency-metrics --period "1h"
|
./aitbc-cli market latency-metrics --period "1h"
|
||||||
```
|
```
|
||||||
|
|
||||||
### Throughput Testing
|
### Throughput Testing
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Test marketplace throughput
|
# Test marketplace throughput
|
||||||
./aitbc-cli marketplace --action throughput-test --duration 300 --transactions-per-second 10
|
./aitbc-cli market throughput-test --duration 300 --transactions-per-second 10
|
||||||
|
|
||||||
# Test AI job throughput
|
# Test AI job throughput
|
||||||
./aitbc-cli marketplace --action ai-throughput-test --duration 300 --jobs-per-minute 5
|
./aitbc-cli market ai-throughput-test --duration 300 --jobs-per-minute 5
|
||||||
|
|
||||||
# Monitor system capacity
|
# Monitor system capacity
|
||||||
./aitbc-cli marketplace --action capacity-metrics --period "24h"
|
./aitbc-cli market capacity-metrics --period "24h"
|
||||||
```
|
```
|
||||||
|
|
||||||
## Troubleshooting Marketplace Issues
|
## Troubleshooting Marketplace Issues
|
||||||
@@ -395,16 +387,16 @@ time ./aitbc-cli ai-submit --wallet test-wallet --type inference --prompt "test"
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Diagnose marketplace connectivity
|
# Diagnose marketplace connectivity
|
||||||
./aitbc-cli marketplace --action connectivity-test
|
./aitbc-cli market connectivity-test
|
||||||
|
|
||||||
# Check marketplace service health
|
# Check marketplace service health
|
||||||
./aitbc-cli marketplace --action health-check
|
./aitbc-cli market health-check
|
||||||
|
|
||||||
# Verify marketplace data integrity
|
# Verify marketplace data integrity
|
||||||
./aitbc-cli marketplace --action integrity-check
|
./aitbc-cli market integrity-check
|
||||||
|
|
||||||
# Debug marketplace transactions
|
# Debug marketplace transactions
|
||||||
./aitbc-cli marketplace --action debug --transaction-id "tx_123"
|
./aitbc-cli market debug --transaction-id "tx_123"
|
||||||
```
|
```
|
||||||
|
|
||||||
## Automation Scripts
|
## Automation Scripts
|
||||||
@@ -418,31 +410,30 @@ time ./aitbc-cli ai-submit --wallet test-wallet --type inference --prompt "test"
|
|||||||
echo "Starting automated marketplace testing..."
|
echo "Starting automated marketplace testing..."
|
||||||
|
|
||||||
# Create test wallets
|
# Create test wallets
|
||||||
./aitbc-cli create --name test-customer --password 123
|
./aitbc-cli wallet create test-customer 123
|
||||||
./aitbc-cli create --name test-provider --password 123
|
./aitbc-cli wallet create test-provider 123
|
||||||
|
|
||||||
# Fund test wallets
|
# Fund test wallets
|
||||||
CUSTOMER_ADDR=$(./aitbc-cli list | grep "test-customer:" | cut -d" " -f2)
|
CUSTOMER_ADDR=$(./aitbc-cli wallet list | grep "test-customer:" | cut -d" " -f2)
|
||||||
PROVIDER_ADDR=$(./aitbc-cli list | grep "test-provider:" | cut -d" " -f2)
|
PROVIDER_ADDR=$(./aitbc-cli wallet list | grep "test-provider:" | cut -d" " -f2)
|
||||||
|
|
||||||
./aitbc-cli send --from genesis-ops --to $CUSTOMER_ADDR --amount 1000 --password 123
|
./aitbc-cli wallet send genesis-ops $CUSTOMER_ADDR 1000 123
|
||||||
./aitbc-cli send --from genesis-ops --to $PROVIDER_ADDR --amount 1000 --password 123
|
./aitbc-cli wallet send genesis-ops $PROVIDER_ADDR 1000 123
|
||||||
|
|
||||||
# Create test service
|
# Create test service
|
||||||
./aitbc-cli marketplace --action create \
|
./aitbc-cli market create \
|
||||||
--name "Test AI Service" \
|
|
||||||
--type ai-inference \
|
--type ai-inference \
|
||||||
--price 50 \
|
--price 50 \
|
||||||
--wallet test-provider \
|
--wallet test-provider \
|
||||||
--description "Automated test service"
|
--description "Test AI Service"
|
||||||
|
|
||||||
# Test complete workflow
|
# Test complete workflow
|
||||||
SERVICE_ID=$(./aitbc-cli marketplace --action list | grep "Test AI Service" | grep "service_id" | cut -d" " -f2)
|
SERVICE_ID=$(./aitbc-cli market list | grep "Test AI Service" | grep "service_id" | cut -d" " -f2)
|
||||||
|
|
||||||
./aitbc-cli marketplace --action bid --service-id $SERVICE_ID --amount 60 --wallet test-customer
|
./aitbc-cli market bid --service-id $SERVICE_ID --amount 60 --wallet test-customer
|
||||||
./aitbc-cli marketplace --action accept-bid --service-id $SERVICE_ID --bid-id "test_bid" --wallet test-provider
|
./aitbc-cli market accept-bid --service-id $SERVICE_ID --bid-id "test_bid" --wallet test-provider
|
||||||
|
|
||||||
./aitbc-cli ai-submit --wallet test-customer --type inference --prompt "test image" --payment 60
|
./aitbc-cli ai submit --wallet test-customer --type inference --prompt "test image" --payment 60
|
||||||
|
|
||||||
# Verify results
|
# Verify results
|
||||||
echo "Test completed successfully!"
|
echo "Test completed successfully!"
|
||||||
@@ -458,9 +449,9 @@ while true; do
|
|||||||
TIMESTAMP=$(date +%Y-%m-%d_%H:%M:%S)
|
TIMESTAMP=$(date +%Y-%m-%d_%H:%M:%S)
|
||||||
|
|
||||||
# Collect metrics
|
# Collect metrics
|
||||||
ACTIVE_SERVICES=$(./aitbc-cli marketplace --action list | grep -c "service_id")
|
ACTIVE_SERVICES=$(./aitbc-cli market list | grep -c "service_id")
|
||||||
PENDING_BIDS=$(./aitbc-cli marketplace --action pending-bids | grep -c "bid_id")
|
PENDING_BIDS=$(./aitbc-cli market pending-bids | grep -c "bid_id")
|
||||||
TOTAL_VOLUME=$(./aitbc-cli marketplace --action volume --period "1h")
|
TOTAL_VOLUME=$(./aitbc-cli market volume --period "1h")
|
||||||
|
|
||||||
# Log metrics
|
# Log metrics
|
||||||
echo "$TIMESTAMP,services:$ACTIVE_SERVICES,bids:$PENDING_BIDS,volume:$TOTAL_VOLUME" >> /var/log/aitbc/marketplace_performance.log
|
echo "$TIMESTAMP,services:$ACTIVE_SERVICES,bids:$PENDING_BIDS,volume:$TOTAL_VOLUME" >> /var/log/aitbc/marketplace_performance.log
|
||||||
|
|||||||
@@ -53,18 +53,18 @@ watch -n 10 'curl -s http://localhost:8006/rpc/head | jq "{height: .height, time
|
|||||||
```bash
|
```bash
|
||||||
# Check wallet balances
|
# Check wallet balances
|
||||||
cd /opt/aitbc && source venv/bin/activate
|
cd /opt/aitbc && source venv/bin/activate
|
||||||
./aitbc-cli balance --name genesis-ops
|
./aitbc-cli wallet balance genesis-ops
|
||||||
./aitbc-cli balance --name user-wallet
|
./aitbc-cli wallet balance user-wallet
|
||||||
|
|
||||||
# Send transactions
|
# Send transactions
|
||||||
./aitbc-cli send --from genesis-ops --to user-wallet --amount 100 --password 123
|
./aitbc-cli wallet send genesis-ops user-wallet 100 123
|
||||||
|
|
||||||
# Check transaction history
|
# Check transaction history
|
||||||
./aitbc-cli transactions --name genesis-ops --limit 10
|
./aitbc-cli wallet transactions genesis-ops --limit 10
|
||||||
|
|
||||||
# Cross-node transaction
|
# Cross-node transaction
|
||||||
FOLLOWER_ADDR=$(ssh aitbc1 'cd /opt/aitbc && source venv/bin/activate && ./aitbc-cli list | grep "follower-ops:" | cut -d" " -f2')
|
FOLLOWER_ADDR=$(ssh aitbc1 'cd /opt/aitbc && source venv/bin/activate && ./aitbc-cli wallet list | grep "follower-ops:" | cut -d" " -f2')
|
||||||
./aitbc-cli send --from genesis-ops --to $FOLLOWER_ADDR --amount 50 --password 123
|
./aitbc-cli wallet send genesis-ops $FOLLOWER_ADDR 50 123
|
||||||
```
|
```
|
||||||
|
|
||||||
## Health Monitoring
|
## Health Monitoring
|
||||||
@@ -216,7 +216,7 @@ curl -s http://localhost:8006/rpc/head | jq .height
|
|||||||
sudo grep "Failed password" /var/log/auth.log | tail -10
|
sudo grep "Failed password" /var/log/auth.log | tail -10
|
||||||
|
|
||||||
# Monitor blockchain for suspicious activity
|
# Monitor blockchain for suspicious activity
|
||||||
./aitbc-cli transactions --name genesis-ops --limit 20 | grep -E "(large|unusual)"
|
./aitbc-cli wallet transactions genesis-ops --limit 20 | grep -E "(large|unusual)"
|
||||||
|
|
||||||
# Check file permissions
|
# Check file permissions
|
||||||
ls -la /var/lib/aitbc/
|
ls -la /var/lib/aitbc/
|
||||||
|
|||||||
@@ -111,17 +111,17 @@ echo "Height difference: $((FOLLOWER_HEIGHT - GENESIS_HEIGHT))"
|
|||||||
```bash
|
```bash
|
||||||
# List all wallets
|
# List all wallets
|
||||||
cd /opt/aitbc && source venv/bin/activate
|
cd /opt/aitbc && source venv/bin/activate
|
||||||
./aitbc-cli list
|
./aitbc-cli wallet list
|
||||||
|
|
||||||
# Check specific wallet balance
|
# Check specific wallet balance
|
||||||
./aitbc-cli balance --name genesis-ops
|
./aitbc-cli wallet balance genesis-ops
|
||||||
./aitbc-cli balance --name follower-ops
|
./aitbc-cli wallet balance follower-ops
|
||||||
|
|
||||||
# Verify wallet addresses
|
# Verify wallet addresses
|
||||||
./aitbc-cli list | grep -E "(genesis-ops|follower-ops)"
|
./aitbc-cli wallet list | grep -E "(genesis-ops|follower-ops)"
|
||||||
|
|
||||||
# Test wallet operations
|
# Test wallet operations
|
||||||
./aitbc-cli send --from genesis-ops --to follower-ops --amount 10 --password 123
|
./aitbc-cli wallet send genesis-ops follower-ops 10 123
|
||||||
```
|
```
|
||||||
|
|
||||||
### Network Verification
|
### Network Verification
|
||||||
@@ -133,7 +133,7 @@ ssh aitbc1 'ping -c 3 localhost'
|
|||||||
|
|
||||||
# Test RPC endpoints
|
# Test RPC endpoints
|
||||||
curl -s http://localhost:8006/rpc/head > /dev/null && echo "Local RPC OK"
|
curl -s http://localhost:8006/rpc/head > /dev/null && echo "Local RPC OK"
|
||||||
ssh aitbc1 'curl -s http://localhost:8006/rpc/head > /dev/null && echo "Remote RPC OK"'
|
ssh aitbc1 'curl -s http://localhost:8007/rpc/head > /dev/null && echo "Remote RPC OK"'
|
||||||
|
|
||||||
# Test P2P connectivity
|
# Test P2P connectivity
|
||||||
telnet aitbc1 7070
|
telnet aitbc1 7070
|
||||||
@@ -146,16 +146,16 @@ ping -c 5 aitbc1 | tail -1
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Check AI services
|
# Check AI services
|
||||||
./aitbc-cli marketplace --action list
|
./aitbc-cli market list
|
||||||
|
|
||||||
# Test AI job submission
|
# Test AI job submission
|
||||||
./aitbc-cli ai-submit --wallet genesis-ops --type inference --prompt "test" --payment 10
|
./aitbc-cli ai submit --wallet genesis-ops --type inference --prompt "test" --payment 10
|
||||||
|
|
||||||
# Verify resource allocation
|
# Verify resource allocation
|
||||||
./aitbc-cli resource status
|
./aitbc-cli resource status
|
||||||
|
|
||||||
# Check AI job status
|
# Check AI job status
|
||||||
./aitbc-cli ai-status --job-id "latest"
|
./aitbc-cli ai status --job-id "latest"
|
||||||
```
|
```
|
||||||
|
|
||||||
### Smart Contract Verification
|
### Smart Contract Verification
|
||||||
@@ -263,16 +263,16 @@ Redis Service (for gossip)
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Quick health check
|
# Quick health check
|
||||||
./aitbc-cli chain && ./aitbc-cli network
|
./aitbc-cli blockchain info && ./aitbc-cli network status
|
||||||
|
|
||||||
# Service status
|
# Service status
|
||||||
systemctl status aitbc-blockchain-node.service aitbc-blockchain-rpc.service
|
systemctl status aitbc-blockchain-node.service aitbc-blockchain-rpc.service
|
||||||
|
|
||||||
# Cross-node sync check
|
# Cross-node sync check
|
||||||
curl -s http://localhost:8006/rpc/head | jq .height && ssh aitbc1 'curl -s http://localhost:8006/rpc/head | jq .height'
|
curl -s http://localhost:8006/rpc/head | jq .height && ssh aitbc1 'curl -s http://localhost:8007/rpc/head | jq .height'
|
||||||
|
|
||||||
# Wallet balance check
|
# Wallet balance check
|
||||||
./aitbc-cli balance --name genesis-ops
|
./aitbc-cli wallet balance genesis-ops
|
||||||
```
|
```
|
||||||
|
|
||||||
### Troubleshooting
|
### Troubleshooting
|
||||||
@@ -347,20 +347,20 @@ SESSION_ID="task-$(date +%s)"
|
|||||||
openclaw agent --agent main --session-id $SESSION_ID --message "Task description"
|
openclaw agent --agent main --session-id $SESSION_ID --message "Task description"
|
||||||
|
|
||||||
# Always verify transactions
|
# Always verify transactions
|
||||||
./aitbc-cli transactions --name wallet-name --limit 5
|
./aitbc-cli wallet transactions wallet-name --limit 5
|
||||||
|
|
||||||
# Monitor cross-node synchronization
|
# Monitor cross-node synchronization
|
||||||
watch -n 10 'curl -s http://localhost:8006/rpc/head | jq .height && ssh aitbc1 "curl -s http://localhost:8006/rpc/head | jq .height"'
|
watch -n 10 'curl -s http://localhost:8006/rpc/head | jq .height && ssh aitbc1 "curl -s http://localhost:8007/rpc/head | jq .height"'
|
||||||
```
|
```
|
||||||
|
|
||||||
### Development Best Practices
|
### Development Best Practices
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Test in development environment first
|
# Test in development environment first
|
||||||
./aitbc-cli send --from test-wallet --to test-wallet --amount 1 --password test
|
./aitbc-cli wallet send test-wallet test-wallet 1 test
|
||||||
|
|
||||||
# Use meaningful wallet names
|
# Use meaningful wallet names
|
||||||
./aitbc-cli create --name "genesis-operations" --password "strong_password"
|
./aitbc-cli wallet create "genesis-operations" "strong_password"
|
||||||
|
|
||||||
# Document all configuration changes
|
# Document all configuration changes
|
||||||
git add /etc/aitbc/.env
|
git add /etc/aitbc/.env
|
||||||
@@ -424,14 +424,14 @@ sudo systemctl restart aitbc-blockchain-node.service
|
|||||||
**Problem**: Wallet balance incorrect
|
**Problem**: Wallet balance incorrect
|
||||||
```bash
|
```bash
|
||||||
# Check correct node
|
# Check correct node
|
||||||
./aitbc-cli balance --name wallet-name
|
./aitbc-cli wallet balance wallet-name
|
||||||
ssh aitbc1 './aitbc-cli balance --name wallet-name'
|
ssh aitbc1 './aitbc-cli wallet balance wallet-name'
|
||||||
|
|
||||||
# Verify wallet address
|
# Verify wallet address
|
||||||
./aitbc-cli list | grep "wallet-name"
|
./aitbc-cli wallet list | grep "wallet-name"
|
||||||
|
|
||||||
# Check transaction history
|
# Check transaction history
|
||||||
./aitbc-cli transactions --name wallet-name --limit 10
|
./aitbc-cli wallet transactions wallet-name --limit 10
|
||||||
```
|
```
|
||||||
|
|
||||||
#### AI Operations Issues
|
#### AI Operations Issues
|
||||||
@@ -439,16 +439,16 @@ ssh aitbc1 './aitbc-cli balance --name wallet-name'
|
|||||||
**Problem**: AI jobs not processing
|
**Problem**: AI jobs not processing
|
||||||
```bash
|
```bash
|
||||||
# Check AI services
|
# Check AI services
|
||||||
./aitbc-cli marketplace --action list
|
./aitbc-cli market list
|
||||||
|
|
||||||
# Check resource allocation
|
# Check resource allocation
|
||||||
./aitbc-cli resource status
|
./aitbc-cli resource status
|
||||||
|
|
||||||
# Check job status
|
# Check AI job status
|
||||||
./aitbc-cli ai-status --job-id "job_id"
|
./aitbc-cli ai status --job-id "job_id"
|
||||||
|
|
||||||
# Verify wallet balance
|
# Verify wallet balance
|
||||||
./aitbc-cli balance --name wallet-name
|
./aitbc-cli wallet balance wallet-name
|
||||||
```
|
```
|
||||||
|
|
||||||
### Emergency Procedures
|
### Emergency Procedures
|
||||||
|
|||||||
@@ -103,7 +103,7 @@ ssh aitbc1 '/opt/aitbc/scripts/workflow/03_follower_node_setup.sh'
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Monitor sync progress on both nodes
|
# Monitor sync progress on both nodes
|
||||||
watch -n 5 'echo "=== Genesis Node ===" && curl -s http://localhost:8006/rpc/head | jq .height && echo "=== Follower Node ===" && ssh aitbc1 "curl -s http://localhost:8006/rpc/head | jq .height"'
|
watch -n 5 'echo "=== Genesis Node ===" && curl -s http://localhost:8006/rpc/head | jq .height && echo "=== Follower Node ===" && ssh aitbc1 "curl -s http://localhost:8007/rpc/head | jq .height"'
|
||||||
```
|
```
|
||||||
|
|
||||||
### 5. Basic Wallet Operations
|
### 5. Basic Wallet Operations
|
||||||
@@ -113,30 +113,30 @@ watch -n 5 'echo "=== Genesis Node ===" && curl -s http://localhost:8006/rpc/hea
|
|||||||
cd /opt/aitbc && source venv/bin/activate
|
cd /opt/aitbc && source venv/bin/activate
|
||||||
|
|
||||||
# Create genesis operations wallet
|
# Create genesis operations wallet
|
||||||
./aitbc-cli create --name genesis-ops --password 123
|
./aitbc-cli wallet create genesis-ops 123
|
||||||
|
|
||||||
# Create user wallet
|
# Create user wallet
|
||||||
./aitbc-cli create --name user-wallet --password 123
|
./aitbc-cli wallet create user-wallet 123
|
||||||
|
|
||||||
# List wallets
|
# List wallets
|
||||||
./aitbc-cli list
|
./aitbc-cli wallet list
|
||||||
|
|
||||||
# Check balances
|
# Check balances
|
||||||
./aitbc-cli balance --name genesis-ops
|
./aitbc-cli wallet balance genesis-ops
|
||||||
./aitbc-cli balance --name user-wallet
|
./aitbc-cli wallet balance user-wallet
|
||||||
```
|
```
|
||||||
|
|
||||||
### 6. Cross-Node Transaction Test
|
### 6. Cross-Node Transaction Test
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Get follower node wallet address
|
# Get follower node wallet address
|
||||||
FOLLOWER_WALLET_ADDR=$(ssh aitbc1 'cd /opt/aitbc && source venv/bin/activate && ./aitbc-cli create --name follower-ops --password 123 | grep "Address:" | cut -d" " -f2')
|
FOLLOWER_WALLET_ADDR=$(ssh aitbc1 'cd /opt/aitbc && source venv/bin/activate && ./aitbc-cli wallet create follower-ops 123 | grep "Address:" | cut -d" " -f2')
|
||||||
|
|
||||||
# Send transaction from genesis to follower
|
# Send transaction from genesis to follower
|
||||||
./aitbc-cli send --from genesis-ops --to $FOLLOWER_WALLET_ADDR --amount 1000 --password 123
|
./aitbc-cli wallet send genesis-ops $FOLLOWER_WALLET_ADDR 1000 123
|
||||||
|
|
||||||
# Verify transaction on follower node
|
# Verify transaction on follower node
|
||||||
ssh aitbc1 'cd /opt/aitbc && source venv/bin/activate && ./aitbc-cli balance --name follower-ops'
|
ssh aitbc1 'cd /opt/aitbc && source venv/bin/activate && ./aitbc-cli wallet balance follower-ops'
|
||||||
```
|
```
|
||||||
|
|
||||||
## Verification Commands
|
## Verification Commands
|
||||||
@@ -148,15 +148,15 @@ ssh aitbc1 'systemctl status aitbc-blockchain-node.service aitbc-blockchain-rpc.
|
|||||||
|
|
||||||
# Check blockchain heights match
|
# Check blockchain heights match
|
||||||
curl -s http://localhost:8006/rpc/head | jq .height
|
curl -s http://localhost:8006/rpc/head | jq .height
|
||||||
ssh aitbc1 'curl -s http://localhost:8006/rpc/head | jq .height'
|
ssh aitbc1 'curl -s http://localhost:8007/rpc/head | jq .height'
|
||||||
|
|
||||||
# Check network connectivity
|
# Check network connectivity
|
||||||
ping -c 3 aitbc1
|
ping -c 3 aitbc1
|
||||||
ssh aitbc1 'ping -c 3 localhost'
|
ssh aitbc1 'ping -c 3 localhost'
|
||||||
|
|
||||||
# Verify wallet creation
|
# Verify wallet creation
|
||||||
./aitbc-cli list
|
./aitbc-cli wallet list
|
||||||
ssh aitbc1 'cd /opt/aitbc && source venv/bin/activate && ./aitbc-cli list'
|
ssh aitbc1 'cd /opt/aitbc && source venv/bin/activate && ./aitbc-cli wallet list'
|
||||||
```
|
```
|
||||||
|
|
||||||
## Troubleshooting Core Setup
|
## Troubleshooting Core Setup
|
||||||
|
|||||||
@@ -33,25 +33,25 @@ openclaw agent --agent main --session-id $SESSION_ID --message "Report progress"
|
|||||||
|
|
||||||
# AITBC CLI — always from /opt/aitbc with venv
|
# AITBC CLI — always from /opt/aitbc with venv
|
||||||
cd /opt/aitbc && source venv/bin/activate
|
cd /opt/aitbc && source venv/bin/activate
|
||||||
./aitbc-cli create --name wallet-name
|
./aitbc-cli wallet create wallet-name
|
||||||
./aitbc-cli list
|
./aitbc-cli wallet list
|
||||||
./aitbc-cli balance --name wallet-name
|
./aitbc-cli wallet balance wallet-name
|
||||||
./aitbc-cli send --from wallet1 --to address --amount 100 --password pass
|
./aitbc-cli wallet send wallet1 address 100 pass
|
||||||
./aitbc-cli chain
|
./aitbc-cli blockchain info
|
||||||
./aitbc-cli network
|
./aitbc-cli network status
|
||||||
|
|
||||||
# AI Operations (NEW)
|
# AI Operations (NEW)
|
||||||
./aitbc-cli ai-submit --wallet wallet --type inference --prompt "Generate image" --payment 100
|
./aitbc-cli ai submit --wallet wallet --type inference --prompt "Generate image" --payment 100
|
||||||
./aitbc-cli agent create --name ai-agent --description "AI agent"
|
./aitbc-cli agent create --name ai-agent --description "AI agent"
|
||||||
./aitbc-cli resource allocate --agent-id ai-agent --gpu 1 --memory 8192 --duration 3600
|
./aitbc-cli resource allocate --agent-id ai-agent --memory 8192 --duration 3600
|
||||||
./aitbc-cli marketplace --action create --name "AI Service" --price 50 --wallet wallet
|
./aitbc-cli market create --type ai-inference --price 50 --description "AI Service" --wallet wallet
|
||||||
|
|
||||||
# Cross-node — always activate venv on remote
|
# Cross-node — always activate venv on remote
|
||||||
ssh aitbc1 'cd /opt/aitbc && source venv/bin/activate && ./aitbc-cli list'
|
ssh aitbc1 'cd /opt/aitbc && source venv/bin/activate && ./aitbc-cli wallet list'
|
||||||
|
|
||||||
# RPC checks
|
# RPC checks
|
||||||
curl -s http://localhost:8006/rpc/head | jq '.height'
|
curl -s http://localhost:8006/rpc/head | jq '.height'
|
||||||
ssh aitbc1 'curl -s http://localhost:8006/rpc/head | jq .height'
|
ssh aitbc1 'curl -s http://localhost:8007/rpc/head | jq .height'
|
||||||
|
|
||||||
# Smart Contract Messaging (NEW)
|
# Smart Contract Messaging (NEW)
|
||||||
curl -X POST http://localhost:8006/rpc/messaging/topics/create \
|
curl -X POST http://localhost:8006/rpc/messaging/topics/create \
|
||||||
@@ -219,11 +219,11 @@ openclaw agent --agent main --message "Teach me AITBC Agent Messaging Contract f
|
|||||||
```bash
|
```bash
|
||||||
# Blockchain height (both nodes)
|
# Blockchain height (both nodes)
|
||||||
curl -s http://localhost:8006/rpc/head | jq '.height'
|
curl -s http://localhost:8006/rpc/head | jq '.height'
|
||||||
ssh aitbc1 'curl -s http://localhost:8006/rpc/head | jq .height'
|
ssh aitbc1 'curl -s http://localhost:8007/rpc/head | jq .height'
|
||||||
|
|
||||||
# Wallets
|
# Wallets
|
||||||
cd /opt/aitbc && source venv/bin/activate && ./aitbc-cli list
|
cd /opt/aitbc && source venv/bin/activate && ./aitbc-cli wallet list
|
||||||
ssh aitbc1 'cd /opt/aitbc && source venv/bin/activate && ./aitbc-cli list'
|
ssh aitbc1 'cd /opt/aitbc && source venv/bin/activate && ./aitbc-cli wallet list'
|
||||||
|
|
||||||
# Services
|
# Services
|
||||||
systemctl is-active aitbc-blockchain-{node,rpc}.service
|
systemctl is-active aitbc-blockchain-{node,rpc}.service
|
||||||
|
|||||||
@@ -1,144 +0,0 @@
|
|||||||
# AITBC1 Server Test Commands
|
|
||||||
|
|
||||||
## 🚀 **Sync and Test Instructions**
|
|
||||||
|
|
||||||
Run these commands on the **aitbc1 server** to test the workflow migration:
|
|
||||||
|
|
||||||
### **Step 1: Sync from Gitea**
|
|
||||||
```bash
|
|
||||||
# Navigate to AITBC directory
|
|
||||||
cd /opt/aitbc
|
|
||||||
|
|
||||||
# Pull latest changes from localhost aitbc (Gitea)
|
|
||||||
git pull origin main
|
|
||||||
```
|
|
||||||
|
|
||||||
### **Step 2: Run Comprehensive Test**
|
|
||||||
```bash
|
|
||||||
# Execute the automated test script
|
|
||||||
./scripts/testing/aitbc1_sync_test.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
### **Step 3: Manual Verification (Optional)**
|
|
||||||
```bash
|
|
||||||
# Check that pre-commit config is gone
|
|
||||||
ls -la .pre-commit-config.yaml
|
|
||||||
# Should show: No such file or directory
|
|
||||||
|
|
||||||
# Check workflow files exist
|
|
||||||
ls -la .windsurf/workflows/
|
|
||||||
# Should show: code-quality.md, type-checking-ci-cd.md, etc.
|
|
||||||
|
|
||||||
# Test git operations (no warnings)
|
|
||||||
echo "test" > test_file.txt
|
|
||||||
git add test_file.txt
|
|
||||||
git commit -m "test: verify no pre-commit warnings"
|
|
||||||
git reset --hard HEAD~1
|
|
||||||
rm test_file.txt
|
|
||||||
|
|
||||||
# Test type checking
|
|
||||||
./scripts/type-checking/check-coverage.sh
|
|
||||||
|
|
||||||
# Test MyPy
|
|
||||||
./venv/bin/mypy --ignore-missing-imports apps/coordinator-api/src/app/domain/job.py
|
|
||||||
```
|
|
||||||
|
|
||||||
## 📋 **Expected Results**
|
|
||||||
|
|
||||||
### ✅ **Successful Sync**
|
|
||||||
- Git pull completes without errors
|
|
||||||
- Latest workflow files are available
|
|
||||||
- No pre-commit configuration file
|
|
||||||
|
|
||||||
### ✅ **No Pre-commit Warnings**
|
|
||||||
- Git add/commit operations work silently
|
|
||||||
- No "No .pre-commit-config.yaml file was found" messages
|
|
||||||
- Clean git operations
|
|
||||||
|
|
||||||
### ✅ **Workflow System Working**
|
|
||||||
- Type checking script executes
|
|
||||||
- MyPy runs on domain models
|
|
||||||
- Workflow documentation accessible
|
|
||||||
|
|
||||||
### ✅ **File Organization**
|
|
||||||
- `.windsurf/workflows/` contains workflow files
|
|
||||||
- `scripts/type-checking/` contains type checking tools
|
|
||||||
- `config/quality/` contains quality configurations
|
|
||||||
|
|
||||||
## 🔧 **Debugging**
|
|
||||||
|
|
||||||
### **If Git Pull Fails**
|
|
||||||
```bash
|
|
||||||
# Check remote configuration
|
|
||||||
git remote -v
|
|
||||||
|
|
||||||
# Force pull if needed
|
|
||||||
git fetch origin main
|
|
||||||
git reset --hard origin/main
|
|
||||||
```
|
|
||||||
|
|
||||||
### **If Type Checking Fails**
|
|
||||||
```bash
|
|
||||||
# Check dependencies
|
|
||||||
./venv/bin/pip install mypy sqlalchemy sqlmodel fastapi
|
|
||||||
|
|
||||||
# Check script permissions
|
|
||||||
chmod +x scripts/type-checking/check-coverage.sh
|
|
||||||
|
|
||||||
# Run manually
|
|
||||||
./venv/bin/mypy --ignore-missing-imports apps/coordinator-api/src/app/domain/
|
|
||||||
```
|
|
||||||
|
|
||||||
### **If Pre-commit Warnings Appear**
|
|
||||||
```bash
|
|
||||||
# Check if pre-commit is still installed
|
|
||||||
./venv/bin/pre-commit --version
|
|
||||||
|
|
||||||
# Uninstall if needed
|
|
||||||
./venv/bin/pre-commit uninstall
|
|
||||||
|
|
||||||
# Check git config
|
|
||||||
git config --get pre-commit.allowMissingConfig
|
|
||||||
# Should return: true
|
|
||||||
```
|
|
||||||
|
|
||||||
## 📊 **Test Checklist**
|
|
||||||
|
|
||||||
- [ ] Git pull from Gitea successful
|
|
||||||
- [ ] No pre-commit warnings on git operations
|
|
||||||
- [ ] Workflow files present in `.windsurf/workflows/`
|
|
||||||
- [ ] Type checking script executable
|
|
||||||
- [ ] MyPy runs without errors
|
|
||||||
- [ ] Documentation accessible
|
|
||||||
- [ ] No `.pre-commit-config.yaml` file
|
|
||||||
- [ ] All tests in script pass
|
|
||||||
|
|
||||||
## 🎯 **Success Indicators**
|
|
||||||
|
|
||||||
### **Green Lights**
|
|
||||||
```
|
|
||||||
[SUCCESS] Successfully pulled from Gitea
|
|
||||||
[SUCCESS] Pre-commit config successfully removed
|
|
||||||
[SUCCESS] Type checking test passed
|
|
||||||
[SUCCESS] MyPy test on job.py passed
|
|
||||||
[SUCCESS] Git commit successful (no pre-commit warnings)
|
|
||||||
[SUCCESS] AITBC1 server sync and test completed successfully!
|
|
||||||
```
|
|
||||||
|
|
||||||
### **File Structure**
|
|
||||||
```
|
|
||||||
/opt/aitbc/
|
|
||||||
├── .windsurf/workflows/
|
|
||||||
│ ├── code-quality.md
|
|
||||||
│ ├── type-checking-ci-cd.md
|
|
||||||
│ └── MULTI_NODE_MASTER_INDEX.md
|
|
||||||
├── scripts/type-checking/
|
|
||||||
│ └── check-coverage.sh
|
|
||||||
├── config/quality/
|
|
||||||
│ └── requirements-consolidated.txt
|
|
||||||
└── (no .pre-commit-config.yaml file)
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Run these commands on aitbc1 server to verify the workflow migration is working correctly!**
|
|
||||||
@@ -1,135 +0,0 @@
|
|||||||
# AITBC1 Server - Updated Commands
|
|
||||||
|
|
||||||
## 🎯 **Status Update**
|
|
||||||
The aitbc1 server test was **mostly successful**! ✅
|
|
||||||
|
|
||||||
### **✅ What Worked**
|
|
||||||
- Git pull from Gitea: ✅ Successful
|
|
||||||
- Workflow files: ✅ Available (17 files)
|
|
||||||
- Pre-commit removal: ✅ Confirmed (no warnings)
|
|
||||||
- Git operations: ✅ No warnings on commit
|
|
||||||
|
|
||||||
### **⚠️ Minor Issues Fixed**
|
|
||||||
- Missing workflow files: ✅ Now pushed to Gitea
|
|
||||||
- .windsurf in .gitignore: ✅ Fixed (now tracking workflows)
|
|
||||||
|
|
||||||
## 🚀 **Updated Commands for AITBC1**
|
|
||||||
|
|
||||||
### **Step 1: Pull Latest Changes**
|
|
||||||
```bash
|
|
||||||
# On aitbc1 server:
|
|
||||||
cd /opt/aitbc
|
|
||||||
git pull origin main
|
|
||||||
```
|
|
||||||
|
|
||||||
### **Step 2: Install Missing Dependencies**
|
|
||||||
```bash
|
|
||||||
# Install MyPy for type checking
|
|
||||||
./venv/bin/pip install mypy sqlalchemy sqlmodel fastapi
|
|
||||||
```
|
|
||||||
|
|
||||||
### **Step 3: Verify New Workflow Files**
|
|
||||||
```bash
|
|
||||||
# Check that new workflow files are now available
|
|
||||||
ls -la .windsurf/workflows/code-quality.md
|
|
||||||
ls -la .windsurf/workflows/type-checking-ci-cd.md
|
|
||||||
|
|
||||||
# Should show both files exist
|
|
||||||
```
|
|
||||||
|
|
||||||
### **Step 4: Test Type Checking**
|
|
||||||
```bash
|
|
||||||
# Now test type checking with dependencies installed
|
|
||||||
./scripts/type-checking/check-coverage.sh
|
|
||||||
|
|
||||||
# Test MyPy directly
|
|
||||||
./venv/bin/mypy --ignore-missing-imports apps/coordinator-api/src/app/domain/job.py
|
|
||||||
```
|
|
||||||
|
|
||||||
### **Step 5: Run Full Test Again**
|
|
||||||
```bash
|
|
||||||
# Run the comprehensive test script again
|
|
||||||
./scripts/testing/aitbc1_sync_test.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
## 📊 **Expected Results After Update**
|
|
||||||
|
|
||||||
### **✅ Perfect Test Output**
|
|
||||||
```
|
|
||||||
[SUCCESS] Successfully pulled from Gitea
|
|
||||||
[SUCCESS] Workflow directory found
|
|
||||||
[SUCCESS] Pre-commit config successfully removed
|
|
||||||
[SUCCESS] Type checking script found
|
|
||||||
[SUCCESS] Type checking test passed
|
|
||||||
[SUCCESS] MyPy test on job.py passed
|
|
||||||
[SUCCESS] Git commit successful (no pre-commit warnings)
|
|
||||||
[SUCCESS] AITBC1 server sync and test completed successfully!
|
|
||||||
```
|
|
||||||
|
|
||||||
### **📁 New Files Available**
|
|
||||||
```
|
|
||||||
.windsurf/workflows/
|
|
||||||
├── code-quality.md # ✅ NEW
|
|
||||||
├── type-checking-ci-cd.md # ✅ NEW
|
|
||||||
└── MULTI_NODE_MASTER_INDEX.md # ✅ Already present
|
|
||||||
```
|
|
||||||
|
|
||||||
## 🔧 **If Issues Persist**
|
|
||||||
|
|
||||||
### **MyPy Still Not Found**
|
|
||||||
```bash
|
|
||||||
# Check venv activation
|
|
||||||
source ./venv/bin/activate
|
|
||||||
|
|
||||||
# Install in correct venv
|
|
||||||
pip install mypy sqlalchemy sqlmodel fastapi
|
|
||||||
|
|
||||||
# Verify installation
|
|
||||||
which mypy
|
|
||||||
./venv/bin/mypy --version
|
|
||||||
```
|
|
||||||
|
|
||||||
### **Workflow Files Still Missing**
|
|
||||||
```bash
|
|
||||||
# Force pull latest changes
|
|
||||||
git fetch origin main
|
|
||||||
git reset --hard origin/main
|
|
||||||
|
|
||||||
# Check files
|
|
||||||
find .windsurf/workflows/ -name "*.md" | wc -l
|
|
||||||
# Should show 19+ files
|
|
||||||
```
|
|
||||||
|
|
||||||
## 🎉 **Success Criteria**
|
|
||||||
|
|
||||||
### **Complete Success Indicators**
|
|
||||||
- ✅ **Git operations**: No pre-commit warnings
|
|
||||||
- ✅ **Workflow files**: 19+ files available
|
|
||||||
- ✅ **Type checking**: MyPy working and script passing
|
|
||||||
- ✅ **Documentation**: New workflows accessible
|
|
||||||
- ✅ **Migration**: 100% complete
|
|
||||||
|
|
||||||
### **Final Verification**
|
|
||||||
```bash
|
|
||||||
# Quick verification commands
|
|
||||||
echo "=== Verification ==="
|
|
||||||
echo "1. Git operations (should be silent):"
|
|
||||||
echo "test" > verify.txt && git add verify.txt && git commit -m "verify" && git reset --hard HEAD~1 && rm verify.txt
|
|
||||||
|
|
||||||
echo "2. Workflow files:"
|
|
||||||
ls .windsurf/workflows/*.md | wc -l
|
|
||||||
|
|
||||||
echo "3. Type checking:"
|
|
||||||
./scripts/type-checking/check-coverage.sh | head -5
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 📞 **Next Steps**
|
|
||||||
|
|
||||||
1. **Run the updated commands** above on aitbc1
|
|
||||||
2. **Verify all tests pass** with new dependencies
|
|
||||||
3. **Test the new workflow system** instead of pre-commit
|
|
||||||
4. **Enjoy the improved documentation** and organization!
|
|
||||||
|
|
||||||
**The migration is essentially complete - just need to install MyPy dependencies on aitbc1!** 🚀
|
|
||||||
@@ -1,162 +0,0 @@
|
|||||||
# Python 3.13 Version Status
|
|
||||||
|
|
||||||
## 🎯 **Current Status Report**
|
|
||||||
|
|
||||||
### **✅ You're Already Running the Latest!**
|
|
||||||
|
|
||||||
Your current Python installation is **already up-to-date**:
|
|
||||||
|
|
||||||
```
|
|
||||||
System Python: 3.13.5
|
|
||||||
Virtual Environment: 3.13.5
|
|
||||||
Latest Available: 3.13.5
|
|
||||||
```
|
|
||||||
|
|
||||||
### **📊 Version Details**
|
|
||||||
|
|
||||||
#### **Current Installation**
|
|
||||||
```bash
|
|
||||||
# System Python
|
|
||||||
python3.13 --version
|
|
||||||
# Output: Python 3.13.5
|
|
||||||
|
|
||||||
# Virtual Environment
|
|
||||||
./venv/bin/python --version
|
|
||||||
# Output: Python 3.13.5
|
|
||||||
|
|
||||||
# venv Configuration
|
|
||||||
cat venv/pyvenv.cfg
|
|
||||||
# version = 3.13.5
|
|
||||||
```
|
|
||||||
|
|
||||||
#### **Package Installation Status**
|
|
||||||
All Python 3.13 packages are properly installed:
|
|
||||||
- ✅ python3.13 (3.13.5-2)
|
|
||||||
- ✅ python3.13-dev (3.13.5-2)
|
|
||||||
- ✅ python3.13-venv (3.13.5-2)
|
|
||||||
- ✅ libpython3.13-dev (3.13.5-2)
|
|
||||||
- ✅ All supporting packages
|
|
||||||
|
|
||||||
### **🔍 Verification Commands**
|
|
||||||
|
|
||||||
#### **Check Current Version**
|
|
||||||
```bash
|
|
||||||
# System version
|
|
||||||
python3.13 --version
|
|
||||||
|
|
||||||
# Virtual environment version
|
|
||||||
./venv/bin/python --version
|
|
||||||
|
|
||||||
# Package list
|
|
||||||
apt list --installed | grep python3.13
|
|
||||||
```
|
|
||||||
|
|
||||||
#### **Check for Updates**
|
|
||||||
```bash
|
|
||||||
# Check for available updates
|
|
||||||
apt update
|
|
||||||
apt list --upgradable | grep python3.13
|
|
||||||
|
|
||||||
# Currently: No updates available
|
|
||||||
# Status: Running latest version
|
|
||||||
```
|
|
||||||
|
|
||||||
### **🚀 Performance Benefits of Python 3.13.5**
|
|
||||||
|
|
||||||
#### **Key Improvements**
|
|
||||||
- **🚀 Performance**: 5-10% faster than 3.12
|
|
||||||
- **🧠 Memory**: Better memory management
|
|
||||||
- **🔧 Error Messages**: Improved error reporting
|
|
||||||
- **🛡️ Security**: Latest security patches
|
|
||||||
- **⚡ Compilation**: Faster startup times
|
|
||||||
|
|
||||||
#### **AITBC-Specific Benefits**
|
|
||||||
- **Type Checking**: Better MyPy integration
|
|
||||||
- **FastAPI**: Improved async performance
|
|
||||||
- **SQLAlchemy**: Optimized database operations
|
|
||||||
- **AI/ML**: Enhanced numpy/pandas compatibility
|
|
||||||
|
|
||||||
### **📋 Maintenance Checklist**
|
|
||||||
|
|
||||||
#### **Monthly Check**
|
|
||||||
```bash
|
|
||||||
# Check for Python updates
|
|
||||||
apt update
|
|
||||||
apt list --upgradable | grep python3.13
|
|
||||||
|
|
||||||
# Check venv integrity
|
|
||||||
./venv/bin/python --version
|
|
||||||
./venv/bin/pip list --outdated
|
|
||||||
```
|
|
||||||
|
|
||||||
#### **Quarterly Maintenance**
|
|
||||||
```bash
|
|
||||||
# Update system packages
|
|
||||||
apt update && apt upgrade -y
|
|
||||||
|
|
||||||
# Update pip packages
|
|
||||||
./venv/bin/pip install --upgrade pip
|
|
||||||
./venv/bin/pip list --outdated
|
|
||||||
./venv/bin/pip install --upgrade <package-name>
|
|
||||||
```
|
|
||||||
|
|
||||||
### **🔄 Future Upgrade Path**
|
|
||||||
|
|
||||||
#### **When Python 3.14 is Released**
|
|
||||||
```bash
|
|
||||||
# Monitor for new releases
|
|
||||||
apt search python3.14
|
|
||||||
|
|
||||||
# Upgrade path (when available)
|
|
||||||
apt install python3.14 python3.14-venv
|
|
||||||
|
|
||||||
# Recreate virtual environment
|
|
||||||
deactivate
|
|
||||||
rm -rf venv
|
|
||||||
python3.14 -m venv venv
|
|
||||||
source venv/bin/activate
|
|
||||||
pip install -r requirements.txt
|
|
||||||
```
|
|
||||||
|
|
||||||
### **🎯 Current Recommendations**
|
|
||||||
|
|
||||||
#### **Immediate Actions**
|
|
||||||
- ✅ **No action needed**: Already running latest 3.13.5
|
|
||||||
- ✅ **System is optimal**: All packages up-to-date
|
|
||||||
- ✅ **Performance optimized**: Latest improvements applied
|
|
||||||
|
|
||||||
#### **Monitoring**
|
|
||||||
- **Monthly**: Check for security updates
|
|
||||||
- **Quarterly**: Update pip packages
|
|
||||||
- **Annually**: Review Python version strategy
|
|
||||||
|
|
||||||
### **📈 Version History**
|
|
||||||
|
|
||||||
| Version | Release Date | Status | Notes |
|
|
||||||
|---------|--------------|--------|-------|
|
|
||||||
| 3.13.5 | Current | ✅ Active | Latest stable |
|
|
||||||
| 3.13.4 | Previous | ✅ Supported | Security fixes |
|
|
||||||
| 3.13.3 | Previous | ✅ Supported | Bug fixes |
|
|
||||||
| 3.13.2 | Previous | ✅ Supported | Performance |
|
|
||||||
| 3.13.1 | Previous | ✅ Supported | Stability |
|
|
||||||
| 3.13.0 | Previous | ✅ Supported | Initial release |
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🎉 **Summary**
|
|
||||||
|
|
||||||
**You're already running the latest and greatest Python 3.13.5!**
|
|
||||||
|
|
||||||
- ✅ **Latest Version**: 3.13.5 (most recent stable)
|
|
||||||
- ✅ **All Packages Updated**: Complete installation
|
|
||||||
- ✅ **Optimal Performance**: Latest improvements
|
|
||||||
- ✅ **Security Current**: Latest patches applied
|
|
||||||
- ✅ **AITBC Ready**: Perfect for your project needs
|
|
||||||
|
|
||||||
**No upgrade needed - you're already at the forefront!** 🚀
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
*Last Checked: April 1, 2026*
|
|
||||||
*Status: ✅ UP TO DATE*
|
|
||||||
*Next Check: May 1, 2026*
|
|
||||||
760
README.md
760
README.md
@@ -1,715 +1,95 @@
|
|||||||
# AITBC - AI Training Blockchain
|
# AITBC - Advanced Intelligence Training Blockchain Consortium
|
||||||
|
|
||||||
**Advanced AI Platform with OpenClaw Agent Ecosystem**
|
## Project Structure
|
||||||
|
|
||||||
[](docs/README.md)
|
This project has been organized for better maintainability. Here's the directory structure:
|
||||||
[](docs/about/PHASE_3_COMPLETION_10_10_ACHIEVED.md)
|
|
||||||
[](docs/README.md#-current-status-production-ready---march-18-2026)
|
|
||||||
[](docs/openclaw/OPENCLAW_AGENT_CAPABILITIES_ADVANCED.md)
|
|
||||||
[](LICENSE)
|
|
||||||
|
|
||||||
---
|
### 📁 Essential Root Files
|
||||||
|
- `LICENSE` - Project license
|
||||||
|
- `aitbc-cli` - Main CLI symlink
|
||||||
|
- `README.md` - This file
|
||||||
|
|
||||||
## 🎯 **What is AITBC?**
|
### 📁 Core Directories
|
||||||
|
- `aitbc/` - Core AITBC Python package
|
||||||
|
- `cli/` - Command-line interface implementation
|
||||||
|
- `contracts/` - Smart contracts
|
||||||
|
- `scripts/` - Automation and deployment scripts
|
||||||
|
- `services/` - Microservices
|
||||||
|
- `tests/` - Test suites
|
||||||
|
|
||||||
AITBC (AI Training Blockchain) is a revolutionary platform that combines **advanced AI capabilities** with **OpenClaw agent ecosystem** on a **blockchain infrastructure**. Our platform enables:
|
### 📁 Configuration
|
||||||
|
- `project-config/` - Project configuration files
|
||||||
|
- `pyproject.toml` - Python project configuration
|
||||||
|
- `requirements.txt` - Python dependencies
|
||||||
|
- `poetry.lock` - Dependency lock file
|
||||||
|
- `.gitignore` - Git ignore rules
|
||||||
|
- `.deployment_progress` - Deployment tracking
|
||||||
|
|
||||||
- **🤖 Advanced AI Operations**: Complex workflow orchestration, multi-model pipelines, resource optimization
|
### 📁 Documentation
|
||||||
- **🦞 OpenClaw Agents**: Intelligent agents with advanced AI teaching plan mastery (100% complete)
|
- `docs/` - Comprehensive documentation
|
||||||
- **🔒 Privacy Preservation**: Secure, private ML model training and inference
|
- `README.md` - Main project documentation
|
||||||
- **⚡ Edge Computing**: Distributed computation at the network edge
|
- `SETUP.md` - Setup instructions
|
||||||
- **⛓️ Blockchain Security**: Immutable, transparent, and secure transactions
|
- `PYTHON_VERSION_STATUS.md` - Python compatibility
|
||||||
- **🌐 Multi-Chain Support**: Interoperable blockchain ecosystem
|
- `AITBC1_TEST_COMMANDS.md` - Testing commands
|
||||||
|
- `AITBC1_UPDATED_COMMANDS.md` - Updated commands
|
||||||
|
- `README_DOCUMENTATION.md` - Detailed documentation
|
||||||
|
|
||||||
### 🎓 **Advanced AI Teaching Plan - 100% Complete**
|
### 📁 Development
|
||||||
|
- `dev/` - Development tools and examples
|
||||||
|
- `.windsurf/` - IDE configuration
|
||||||
|
- `packages/` - Package distributions
|
||||||
|
- `extensions/` - Browser extensions
|
||||||
|
- `plugins/` - System plugins
|
||||||
|
|
||||||
Our OpenClaw agents have mastered advanced AI capabilities through a comprehensive 3-phase teaching program:
|
### 📁 Infrastructure
|
||||||
|
- `infra/` - Infrastructure as code
|
||||||
|
- `systemd/` - System service configurations
|
||||||
|
- `monitoring/` - Monitoring setup
|
||||||
|
|
||||||
- **📚 Phase 1**: Advanced AI Workflow Orchestration (Complex pipelines, parallel operations)
|
### 📁 Applications
|
||||||
- **📚 Phase 2**: Multi-Model AI Pipelines (Ensemble management, multi-modal processing)
|
- `apps/` - Application components
|
||||||
- **📚 Phase 3**: AI Resource Optimization (Dynamic allocation, performance tuning)
|
- `services/` - Service implementations
|
||||||
|
- `website/` - Web interface
|
||||||
|
|
||||||
**🤖 Agent Capabilities**: Medical diagnosis, customer feedback analysis, AI service provider optimization
|
### 📁 AI & GPU
|
||||||
|
- `gpu_acceleration/` - GPU optimization
|
||||||
|
- `ai-ml/` - AI/ML components
|
||||||
|
|
||||||
---
|
### 📁 Security & Backup
|
||||||
|
- `security/` - Security reports and fixes
|
||||||
|
- `backup-config/` - Backup configurations
|
||||||
|
- `backups/` - Data backups
|
||||||
|
|
||||||
## 🚀 **Quick Start**
|
### 📁 Cache & Logs
|
||||||
|
- `venv/` - Python virtual environment
|
||||||
|
- `logs/` - Application logs
|
||||||
|
- `.mypy_cache/`, `.pytest_cache/`, `.ruff_cache/` - Tool caches
|
||||||
|
|
||||||
|
## Quick Start
|
||||||
|
|
||||||
### **👤 For Users:**
|
|
||||||
```bash
|
```bash
|
||||||
# Install CLI
|
# Setup environment
|
||||||
git clone https://github.com/oib/AITBC.git
|
|
||||||
cd AITBC/cli
|
|
||||||
pip install -e .
|
|
||||||
|
|
||||||
# Start using AITBC
|
|
||||||
aitbc --help
|
|
||||||
aitbc version
|
|
||||||
|
|
||||||
# Try advanced AI operations
|
|
||||||
aitbc ai-submit --wallet genesis-ops --type multimodal --prompt "Multi-modal AI analysis" --payment 1000
|
|
||||||
```
|
|
||||||
|
|
||||||
### **🤖 For OpenClaw Agent Users:**
|
|
||||||
```bash
|
|
||||||
# Run advanced AI workflow
|
|
||||||
cd /opt/aitbc
|
cd /opt/aitbc
|
||||||
./scripts/workflow-openclaw/06_advanced_ai_workflow_openclaw.sh
|
|
||||||
|
|
||||||
# Use OpenClaw agents directly
|
|
||||||
openclaw agent --agent GenesisAgent --session-id "my-session" --message "Execute advanced AI workflow" --thinking high
|
|
||||||
```
|
|
||||||
|
|
||||||
### **👨💻 For Developers:**
|
|
||||||
```bash
|
|
||||||
# Setup development environment
|
|
||||||
git clone https://github.com/oib/AITBC.git
|
|
||||||
cd AITBC
|
|
||||||
./scripts/setup.sh
|
|
||||||
|
|
||||||
# Install with dependency profiles
|
|
||||||
./scripts/install-profiles.sh minimal
|
|
||||||
./scripts/install-profiles.sh web database
|
|
||||||
|
|
||||||
# Run code quality checks
|
|
||||||
./venv/bin/pre-commit run --all-files
|
|
||||||
./venv/bin/mypy --ignore-missing-imports apps/coordinator-api/src/app/domain/
|
|
||||||
|
|
||||||
# Start development services
|
|
||||||
./scripts/development/dev-services.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
### **⛏️ For Miners:**
|
|
||||||
```bash
|
|
||||||
# Start mining
|
|
||||||
aitbc miner start --config miner-config.yaml
|
|
||||||
|
|
||||||
# Check mining status
|
|
||||||
aitbc miner status
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 📊 **Current Status: PRODUCTION READY**
|
|
||||||
|
|
||||||
**🎉 Achievement Date**: March 18, 2026
|
|
||||||
**🎓 Advanced AI Teaching Plan**: March 30, 2026 (100% Complete)
|
|
||||||
**📈 Quality Score**: 10/10 (Perfect Documentation)
|
|
||||||
**🔧 Infrastructure**: Fully operational production environment
|
|
||||||
|
|
||||||
### ✅ **Completed Features (100%)**
|
|
||||||
- **🏗️ Core Infrastructure**: Coordinator API, Blockchain Node, Miner Node fully operational
|
|
||||||
- **💻 Enhanced CLI System**: 30+ command groups with comprehensive testing (91% success rate)
|
|
||||||
- **🔄 Exchange Infrastructure**: Complete exchange CLI commands and market integration
|
|
||||||
- **⛓️ Multi-Chain Support**: Complete 7-layer architecture with chain isolation
|
|
||||||
- **🤖 Advanced AI Operations**: Complex workflow orchestration, multi-model pipelines, resource optimization
|
|
||||||
- **🦞 OpenClaw Agent Ecosystem**: Advanced AI agents with 3-phase teaching plan mastery
|
|
||||||
- **🔒 Security**: Multi-sig, time-lock, and compliance features implemented
|
|
||||||
- **🚀 Production Setup**: Complete production blockchain setup with encrypted keystores
|
|
||||||
- **🧠 AI Memory System**: Development knowledge base and agent documentation
|
|
||||||
- **🛡️ Enhanced Security**: Secure pickle deserialization and vulnerability scanning
|
|
||||||
- **📁 Repository Organization**: Professional structure with clean root directory
|
|
||||||
- **🔄 Cross-Platform Sync**: GitHub ↔ Gitea fully synchronized
|
|
||||||
- **⚡ Code Quality Excellence**: Pre-commit hooks, Black formatting, type checking (CI/CD integrated)
|
|
||||||
- **📦 Dependency Consolidation**: Unified dependency management with installation profiles
|
|
||||||
- **🔍 Type Checking Implementation**: Comprehensive type safety with 100% core domain coverage
|
|
||||||
- **📊 Project Organization**: Clean root directory with logical file grouping
|
|
||||||
|
|
||||||
### 🎯 **Latest Achievements (March 31, 2026)**
|
|
||||||
- **🎉 Perfect Documentation**: 10/10 quality score achieved
|
|
||||||
- **🎓 Advanced AI Teaching Plan**: 100% complete (3 phases, 6 sessions)
|
|
||||||
- **🤖 OpenClaw Agent Mastery**: Advanced AI workflow orchestration, multi-model pipelines, resource optimization
|
|
||||||
- **⛓️ Multi-Chain System**: Complete 7-layer architecture operational
|
|
||||||
- **📚 Documentation Excellence**: World-class documentation with perfect organization
|
|
||||||
- **⚡ Code Quality Implementation**: Full automated quality checks with type safety
|
|
||||||
- **📦 Dependency Management**: Consolidated dependencies with profile-based installations
|
|
||||||
- **🔍 Type Checking**: Complete MyPy implementation with CI/CD integration
|
|
||||||
- **📁 Project Organization**: Professional structure with 52% root file reduction
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 📁 **Project Structure**
|
|
||||||
|
|
||||||
The AITBC project is organized with a clean root directory containing only essential files:
|
|
||||||
|
|
||||||
```
|
|
||||||
/opt/aitbc/
|
|
||||||
├── README.md # Main documentation
|
|
||||||
├── SETUP.md # Setup guide
|
|
||||||
├── LICENSE # Project license
|
|
||||||
├── pyproject.toml # Python configuration
|
|
||||||
├── requirements.txt # Dependencies
|
|
||||||
├── .pre-commit-config.yaml # Code quality hooks
|
|
||||||
├── apps/ # Application services
|
|
||||||
├── cli/ # Command-line interface
|
|
||||||
├── scripts/ # Automation scripts
|
|
||||||
├── config/ # Configuration files
|
|
||||||
├── docs/ # Documentation
|
|
||||||
├── tests/ # Test suite
|
|
||||||
├── infra/ # Infrastructure
|
|
||||||
└── contracts/ # Smart contracts
|
|
||||||
```
|
|
||||||
|
|
||||||
### Key Directories
|
|
||||||
- **`apps/`** - Core application services (coordinator-api, blockchain-node, etc.)
|
|
||||||
- **`scripts/`** - Setup and automation scripts
|
|
||||||
- **`config/quality/`** - Code quality tools and configurations
|
|
||||||
- **`docs/reports/`** - Implementation reports and summaries
|
|
||||||
- **`cli/`** - Command-line interface tools
|
|
||||||
|
|
||||||
For detailed structure information, see [PROJECT_STRUCTURE.md](docs/PROJECT_STRUCTURE.md).
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## ⚡ **Recent Improvements (March 2026)**
|
|
||||||
|
|
||||||
### **⚡ Code Quality Excellence**
|
|
||||||
- **Pre-commit Hooks**: Automated quality checks on every commit
|
|
||||||
- **Black Formatting**: Consistent code formatting across all files
|
|
||||||
- **Type Checking**: Comprehensive MyPy implementation with CI/CD integration
|
|
||||||
- **Import Sorting**: Standardized import organization with isort
|
|
||||||
- **Linting Rules**: Ruff configuration for code quality enforcement
|
|
||||||
|
|
||||||
### **📦 Dependency Management**
|
|
||||||
- **Consolidated Dependencies**: Unified dependency management across all services
|
|
||||||
- **Installation Profiles**: Profile-based installations (minimal, web, database, blockchain)
|
|
||||||
- **Version Conflicts**: Eliminated all dependency version conflicts
|
|
||||||
- **Service Migration**: Updated all services to use consolidated dependencies
|
|
||||||
|
|
||||||
### **📁 Project Organization**
|
|
||||||
- **Clean Root Directory**: Reduced from 25+ files to 12 essential files
|
|
||||||
- **Logical Grouping**: Related files organized into appropriate subdirectories
|
|
||||||
- **Professional Structure**: Follows Python project best practices
|
|
||||||
- **Documentation**: Comprehensive project structure documentation
|
|
||||||
|
|
||||||
### **🚀 Developer Experience**
|
|
||||||
- **Automated Quality**: Pre-commit hooks and CI/CD integration
|
|
||||||
- **Type Safety**: 100% type coverage for core domain models
|
|
||||||
- **Fast Installation**: Profile-based dependency installation
|
|
||||||
- **Clear Documentation**: Updated guides and implementation reports
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### 🤖 **Advanced AI Capabilities**
|
|
||||||
- **📚 Phase 1**: Advanced AI Workflow Orchestration (Complex pipelines, parallel operations)
|
|
||||||
- **📚 Phase 2**: Multi-Model AI Pipelines (Ensemble management, multi-modal processing)
|
|
||||||
- **📚 Phase 3**: AI Resource Optimization (Dynamic allocation, performance tuning)
|
|
||||||
- **🎓 Agent Mastery**: Genesis, Follower, Coordinator, AI Resource, Multi-Modal agents
|
|
||||||
- **🔄 Cross-Node Coordination**: Smart contract messaging and distributed optimization
|
|
||||||
|
|
||||||
### 📋 **Current Release: v0.2.3**
|
|
||||||
- **Release Date**: March 2026
|
|
||||||
- **Focus**: Advanced AI Teaching Plan completion and AI Economics Masters transformation
|
|
||||||
- **📖 Release Notes**: [View detailed release notes](RELEASE_v0.2.3.md)
|
|
||||||
- **🎯 Status**: Production ready with AI Economics Masters capabilities
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🏗️ **Architecture Overview**
|
|
||||||
|
|
||||||
```
|
|
||||||
AITBC Ecosystem
|
|
||||||
├── 🤖 Advanced AI Components
|
|
||||||
│ ├── Complex AI Workflow Orchestration (Phase 1)
|
|
||||||
│ ├── Multi-Model AI Pipelines (Phase 2)
|
|
||||||
│ ├── AI Resource Optimization (Phase 3)
|
|
||||||
│ ├── OpenClaw Agent Ecosystem
|
|
||||||
│ │ ├── Genesis Agent (Advanced AI operations)
|
|
||||||
│ │ ├── Follower Agent (Distributed coordination)
|
|
||||||
│ │ ├── Coordinator Agent (Multi-agent orchestration)
|
|
||||||
│ │ ├── AI Resource Agent (Resource management)
|
|
||||||
│ │ └── Multi-Modal Agent (Cross-modal processing)
|
|
||||||
│ ├── Trading Engine with ML predictions
|
|
||||||
│ ├── Surveillance System (88-94% accuracy)
|
|
||||||
│ ├── Analytics Platform
|
|
||||||
│ └── Agent SDK for custom AI agents
|
|
||||||
├── ⛓️ Blockchain Infrastructure
|
|
||||||
│ ├── Multi-Chain Support (7-layer architecture)
|
|
||||||
│ ├── Privacy-Preserving Transactions
|
|
||||||
│ ├── Smart Contract Integration
|
|
||||||
│ ├── Cross-Chain Protocols
|
|
||||||
│ └── Agent Messaging Contracts
|
|
||||||
├── 💻 Developer Tools
|
|
||||||
│ ├── Comprehensive CLI (30+ commands)
|
|
||||||
│ ├── Advanced AI Operations (ai-submit, ai-ops)
|
|
||||||
│ ├── Resource Management (resource allocate, monitor)
|
|
||||||
│ ├── Simulation Framework (simulate blockchain, wallets, price, network, ai-jobs)
|
|
||||||
│ ├── Agent Development Kit
|
|
||||||
│ ├── Testing Framework (91% success rate)
|
|
||||||
│ └── API Documentation
|
|
||||||
├── 🔒 Security & Compliance
|
|
||||||
│ ├── Multi-Sig Wallets
|
|
||||||
│ ├── Time-Lock Transactions
|
|
||||||
│ ├── KYC/AML Integration
|
|
||||||
│ └── Security Auditing
|
|
||||||
└── 🌐 Ecosystem Services
|
|
||||||
├── Exchange Integration
|
|
||||||
├── Marketplace Platform
|
|
||||||
├── Governance System
|
|
||||||
├── OpenClaw Agent Coordination
|
|
||||||
└── Community Tools
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 📚 **Documentation**
|
|
||||||
|
|
||||||
Our documentation has achieved **perfect 10/10 quality score** and provides comprehensive guidance for all users:
|
|
||||||
|
|
||||||
### **🎯 Learning Paths:**
|
|
||||||
- **👤 [Beginner Guide](docs/beginner/README.md)** - Start here (8-15 hours)
|
|
||||||
- **🌉 [Intermediate Topics](docs/intermediate/README.md)** - Bridge concepts (18-28 hours)
|
|
||||||
- **🚀 [Advanced Documentation](docs/advanced/README.md)** - Deep technical (20-30 hours)
|
|
||||||
- **🎓 [Expert Topics](docs/expert/README.md)** - Specialized expertise (24-48 hours)
|
|
||||||
- **🤖 [OpenClaw Agent Capabilities](docs/openclaw/OPENCLAW_AGENT_CAPABILITIES_ADVANCED.md)** - Advanced AI agents (15-25 hours)
|
|
||||||
|
|
||||||
### **📚 Quick Access:**
|
|
||||||
- **🔍 [Master Index](docs/MASTER_INDEX.md)** - Complete content catalog
|
|
||||||
- **🏠 [Documentation Home](docs/README.md)** - Main documentation entry
|
|
||||||
- **📖 [About Documentation](docs/about/)** - Documentation about docs
|
|
||||||
- **🗂️ [Archive](docs/archive/README.md)** - Historical documentation
|
|
||||||
- **🦞 [OpenClaw Documentation](docs/openclaw/)** - Advanced AI agent ecosystem
|
|
||||||
|
|
||||||
### **🔗 External Documentation:**
|
|
||||||
- **💻 [CLI Technical Docs](docs/cli-technical/)** - Deep CLI documentation
|
|
||||||
- **📜 [Smart Contracts](docs/contracts/)** - Contract documentation
|
|
||||||
- **🧪 [Testing](docs/testing/)** - Test documentation
|
|
||||||
- **🌐 [Website](docs/website/)** - Website documentation
|
|
||||||
- **🤖 [CLI Documentation](docs/CLI_DOCUMENTATION.md)** - Complete CLI reference with advanced AI operations
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🛠️ **Installation**
|
|
||||||
|
|
||||||
### **System Requirements:**
|
|
||||||
- **Python**: 3.13.5+ (exact version required)
|
|
||||||
- **Node.js**: 24.14.0+ (exact version required)
|
|
||||||
- **Git**: Latest version
|
|
||||||
- **Docker**: Not supported (do not use)
|
|
||||||
|
|
||||||
### **🔍 Root Cause Analysis:**
|
|
||||||
The system requirements are based on actual project configuration:
|
|
||||||
- **Python 3.13.5+**: Defined in `pyproject.toml` as `requires-python = ">=3.13.5"`
|
|
||||||
- **Node.js 24.14.0+**: Defined in `config/.nvmrc` as `24.14.0`
|
|
||||||
- **No Docker Support**: Docker is not used in this project
|
|
||||||
|
|
||||||
### **🚀 Quick Installation:**
|
|
||||||
```bash
|
|
||||||
# Clone the repository
|
|
||||||
git clone https://github.com/oib/AITBC.git
|
|
||||||
cd AITBC
|
|
||||||
|
|
||||||
# Install CLI tool (requires virtual environment)
|
|
||||||
cd cli
|
|
||||||
python3 -m venv venv
|
|
||||||
source venv/bin/activate
|
source venv/bin/activate
|
||||||
pip install -e .
|
|
||||||
|
|
||||||
# Verify installation
|
# Install dependencies
|
||||||
aitbc version
|
pip install -r requirements.txt
|
||||||
aitbc --help
|
|
||||||
|
|
||||||
# OPTIONAL: Add convenient alias for easy access
|
# Run CLI
|
||||||
echo 'alias aitbc="source /opt/aitbc/cli/venv/bin/activate && aitbc"' >> ~/.bashrc
|
./aitbc-cli --help
|
||||||
source ~/.bashrc
|
|
||||||
# Now you can use 'aitbc' from anywhere!
|
# Run training
|
||||||
|
./scripts/training/master_training_launcher.sh
|
||||||
```
|
```
|
||||||
|
|
||||||
### **🔧 Development Setup:**
|
## Development
|
||||||
```bash
|
|
||||||
# Clone the repository
|
|
||||||
git clone https://github.com/oib/AITBC.git
|
|
||||||
cd AITBC
|
|
||||||
|
|
||||||
# Install CLI tool (requires virtual environment)
|
See `docs/SETUP.md` for detailed setup instructions.
|
||||||
cd cli
|
|
||||||
python3 -m venv venv
|
|
||||||
source venv/bin/activate
|
|
||||||
pip install -e ".[dev]"
|
|
||||||
|
|
||||||
# Verify correct Python version
|
## Security
|
||||||
python3 --version # Should be 3.13.5+
|
|
||||||
|
|
||||||
# Verify correct Node.js version
|
See `security/SECURITY_VULNERABILITY_REPORT.md` for security status.
|
||||||
node --version # Should be 24.14.0+
|
|
||||||
|
|
||||||
# Run tests
|
## License
|
||||||
pytest
|
|
||||||
|
|
||||||
# Install pre-commit hooks
|
See `LICENSE` for licensing information.
|
||||||
pre-commit install
|
|
||||||
|
|
||||||
# OPTIONAL: Add convenient alias for easy access
|
|
||||||
echo 'alias aitbc="source /opt/aitbc/cli/venv/bin/activate && aitbc"' >> ~/.bashrc
|
|
||||||
source ~/.bashrc
|
|
||||||
```
|
|
||||||
|
|
||||||
### **⚠️ Version Compliance:**
|
|
||||||
- **Python**: Must be exactly 3.13.5 or higher
|
|
||||||
- **Node.js**: Must be exactly 24.14.0 or higher
|
|
||||||
- **Docker**: Not supported - do not attempt to use
|
|
||||||
- **Package Manager**: Use pip for Python, npm for Node.js packages
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🤖 **OpenClaw Agent Usage**
|
|
||||||
|
|
||||||
### **🎓 Advanced AI Agent Ecosystem**
|
|
||||||
Our OpenClaw agents have completed the **Advanced AI Teaching Plan** and are now sophisticated AI specialists:
|
|
||||||
|
|
||||||
#### **🚀 Quick Start with OpenClaw Agents**
|
|
||||||
```bash
|
|
||||||
# Run complete advanced AI workflow
|
|
||||||
cd /opt/aitbc
|
|
||||||
./scripts/workflow-openclaw/06_advanced_ai_workflow_openclaw.sh
|
|
||||||
|
|
||||||
# Use individual agents
|
|
||||||
openclaw agent --agent GenesisAgent --session-id "my-session" --message "Execute complex AI pipeline" --thinking high
|
|
||||||
openclaw agent --agent FollowerAgent --session-id "coordination" --message "Participate in distributed AI processing" --thinking medium
|
|
||||||
openclaw agent --agent CoordinatorAgent --session-id "orchestration" --message "Coordinate multi-agent workflow" --thinking high
|
|
||||||
```
|
|
||||||
|
|
||||||
#### **🤖 Advanced AI Operations**
|
|
||||||
```bash
|
|
||||||
# Phase 1: Advanced AI Workflow Orchestration
|
|
||||||
./aitbc-cli ai-submit --wallet genesis-ops --type parallel --prompt "Complex AI pipeline for medical diagnosis" --payment 500
|
|
||||||
./aitbc-cli ai-submit --wallet genesis-ops --type ensemble --prompt "Parallel AI processing with ensemble validation" --payment 600
|
|
||||||
|
|
||||||
# Phase 2: Multi-Model AI Pipelines
|
|
||||||
./aitbc-cli ai-submit --wallet genesis-ops --type multimodal --prompt "Multi-modal customer feedback analysis" --payment 1000
|
|
||||||
./aitbc-cli ai-submit --wallet genesis-ops --type fusion --prompt "Cross-modal fusion with joint reasoning" --payment 1200
|
|
||||||
|
|
||||||
# Phase 3: AI Resource Optimization
|
|
||||||
./aitbc-cli ai-submit --wallet genesis-ops --type resource-allocation --prompt "Dynamic resource allocation system" --payment 800
|
|
||||||
./aitbc-cli ai-submit --wallet genesis-ops --type performance-tuning --prompt "AI performance optimization" --payment 1000
|
|
||||||
```
|
|
||||||
|
|
||||||
#### **🔄 Resource Management**
|
|
||||||
```bash
|
|
||||||
# Check resource status
|
|
||||||
./aitbc-cli resource status
|
|
||||||
|
|
||||||
# Allocate resources for AI operations
|
|
||||||
./aitbc-cli resource allocate --agent-id "ai-optimization-agent" --cpu 2 --memory 4096 --duration 3600
|
|
||||||
|
|
||||||
# Monitor AI jobs
|
|
||||||
./aitbc-cli ai-ops --action status --job-id "latest"
|
|
||||||
./aitbc-cli ai-ops --action results --job-id "latest"
|
|
||||||
```
|
|
||||||
|
|
||||||
#### **📊 Simulation Framework**
|
|
||||||
```bash
|
|
||||||
# Simulate blockchain operations
|
|
||||||
./aitbc-cli simulate blockchain --blocks 10 --transactions 50 --delay 1.0
|
|
||||||
|
|
||||||
# Simulate wallet operations
|
|
||||||
./aitbc-cli simulate wallets --wallets 5 --balance 1000 --transactions 20
|
|
||||||
|
|
||||||
# Simulate price movements
|
|
||||||
./aitbc-cli simulate price --price 100 --volatility 0.05 --timesteps 100
|
|
||||||
|
|
||||||
# Simulate network topology
|
|
||||||
./aitbc-cli simulate network --nodes 3 --failure-rate 0.05
|
|
||||||
|
|
||||||
# Simulate AI job processing
|
|
||||||
./aitbc-cli simulate ai-jobs --jobs 10 --models "text-generation,image-generation"
|
|
||||||
```
|
|
||||||
|
|
||||||
#### **🎓 Agent Capabilities Summary**
|
|
||||||
- **🤖 Genesis Agent**: Complex AI operations, resource management, performance optimization
|
|
||||||
- **🤖 Follower Agent**: Distributed AI coordination, resource monitoring, cost optimization
|
|
||||||
- **🤖 Coordinator Agent**: Multi-agent orchestration, cross-node coordination
|
|
||||||
- **🤖 AI Resource Agent**: Resource allocation, performance tuning, demand forecasting
|
|
||||||
- **🤖 Multi-Modal Agent**: Multi-modal processing, cross-modal fusion, ensemble management
|
|
||||||
|
|
||||||
**📚 Detailed Documentation**: [OpenClaw Agent Capabilities](docs/openclaw/OPENCLAW_AGENT_CAPABILITIES_ADVANCED.md)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🎯 **Usage Examples**
|
|
||||||
|
|
||||||
### **💻 CLI Usage:**
|
|
||||||
```bash
|
|
||||||
# Check system status
|
|
||||||
aitbc status
|
|
||||||
|
|
||||||
# Create wallet
|
|
||||||
aitbc wallet create
|
|
||||||
|
|
||||||
# Start mining
|
|
||||||
aitbc miner start
|
|
||||||
|
|
||||||
# Check balance
|
|
||||||
aitbc wallet balance
|
|
||||||
|
|
||||||
# Trade on marketplace
|
|
||||||
aitbc marketplace trade --pair AITBC/USDT --amount 100
|
|
||||||
```
|
|
||||||
|
|
||||||
### **🤖 AI Agent Development:**
|
|
||||||
```python
|
|
||||||
from aitbc.agent import AITBCAgent
|
|
||||||
|
|
||||||
# Create custom agent
|
|
||||||
agent = AITBCAgent(
|
|
||||||
name="MyTradingBot",
|
|
||||||
strategy="ml_trading",
|
|
||||||
config="agent_config.yaml"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Start agent
|
|
||||||
agent.start()
|
|
||||||
```
|
|
||||||
|
|
||||||
### **⛓️ Blockchain Integration:**
|
|
||||||
```python
|
|
||||||
from aitbc.blockchain import AITBCBlockchain
|
|
||||||
|
|
||||||
# Connect to blockchain
|
|
||||||
blockchain = AITBCBlockchain()
|
|
||||||
|
|
||||||
# Create transaction
|
|
||||||
tx = blockchain.create_transaction(
|
|
||||||
to="0x...",
|
|
||||||
amount=100,
|
|
||||||
asset="AITBC"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Send transaction
|
|
||||||
result = blockchain.send_transaction(tx)
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🧪 **Testing**
|
|
||||||
|
|
||||||
### **📊 Test Coverage:**
|
|
||||||
- **Total Tests**: 67 tests
|
|
||||||
- **Pass Rate**: 100% (67/67 passing)
|
|
||||||
- **Coverage**: Comprehensive test suite
|
|
||||||
- **Quality**: Production-ready codebase
|
|
||||||
|
|
||||||
### **🚀 Run Tests:**
|
|
||||||
```bash
|
|
||||||
# Run all tests
|
|
||||||
pytest
|
|
||||||
|
|
||||||
# Run with coverage
|
|
||||||
pytest --cov=aitbc
|
|
||||||
|
|
||||||
# Run specific test file
|
|
||||||
pytest tests/test_cli.py
|
|
||||||
|
|
||||||
# Run with verbose output
|
|
||||||
pytest -v
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🔒 **Security**
|
|
||||||
|
|
||||||
### **🛡️ Security Features:**
|
|
||||||
- **🔐 Multi-Sig Wallets**: Require multiple signatures for transactions
|
|
||||||
- **⏰ Time-Lock Transactions**: Delayed execution for security
|
|
||||||
- **🔍 KYC/AML Integration**: Compliance with regulations
|
|
||||||
- **🛡️ Secure Pickle**: Safe serialization/deserialization
|
|
||||||
- **🔑 Encrypted Keystores**: Secure key storage
|
|
||||||
- **🚨 Vulnerability Scanning**: Regular security audits
|
|
||||||
|
|
||||||
### **🔍 Security Audits:**
|
|
||||||
- **✅ Smart Contract Audits**: Completed and verified
|
|
||||||
- **✅ Code Security**: Vulnerability scanning passed
|
|
||||||
- **✅ Infrastructure Security**: Production security hardened
|
|
||||||
- **✅ Data Protection**: Privacy-preserving features verified
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🌐 **Ecosystem**
|
|
||||||
|
|
||||||
### **🔄 Components:**
|
|
||||||
- **🏗️ [Coordinator API](apps/coordinator-api/)** - Central coordination service
|
|
||||||
- **⛓️ [Blockchain Node](apps/blockchain-node/)** - Core blockchain infrastructure
|
|
||||||
- **⛏️ [Miner Node](apps/miner-node/)** - Mining and validation
|
|
||||||
- **💼 [Browser Wallet](apps/browser-wallet/)** - Web-based wallet
|
|
||||||
- **🏪 [Marketplace Web](apps/marketplace-web/)** - Trading interface
|
|
||||||
- **🔍 [Explorer Web](apps/explorer-web/)** - Blockchain explorer
|
|
||||||
- **🤖 [AI Agent SDK](packages/py/aitbc-agent-sdk/)** - Agent development kit
|
|
||||||
|
|
||||||
### **👥 Community:**
|
|
||||||
- **💬 [Discord](https://discord.gg/aitbc)** - Community chat
|
|
||||||
- **📖 [Forum](https://forum.aitbc.net)** - Discussion forum
|
|
||||||
- **🐙 [GitHub](https://github.com/oib/AITBC)** - Source code
|
|
||||||
- **📚 [Documentation](https://docs.aitbc.net)** - Full documentation
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🤝 **Contributing**
|
|
||||||
|
|
||||||
We welcome contributions! Here's how to get started:
|
|
||||||
|
|
||||||
### **📋 Contribution Guidelines:**
|
|
||||||
1. **Fork** the repository
|
|
||||||
2. **Create** a feature branch
|
|
||||||
3. **Make** your changes
|
|
||||||
4. **Test** thoroughly
|
|
||||||
5. **Submit** a pull request
|
|
||||||
|
|
||||||
### **🛠️ Development Workflow:**
|
|
||||||
```bash
|
|
||||||
# Fork and clone
|
|
||||||
git clone https://github.com/YOUR_USERNAME/AITBC.git
|
|
||||||
cd AITBC
|
|
||||||
|
|
||||||
# Create feature branch
|
|
||||||
git checkout -b feature/amazing-feature
|
|
||||||
|
|
||||||
# Make changes and test
|
|
||||||
pytest
|
|
||||||
|
|
||||||
# Commit and push
|
|
||||||
git commit -m "Add amazing feature"
|
|
||||||
git push origin feature/amazing-feature
|
|
||||||
|
|
||||||
# Create pull request
|
|
||||||
```
|
|
||||||
|
|
||||||
### **📝 Code Standards:**
|
|
||||||
- **Python**: Follow PEP 8
|
|
||||||
- **JavaScript**: Use ESLint configuration
|
|
||||||
- **Documentation**: Follow our template standards
|
|
||||||
- **Testing**: Maintain 100% test coverage
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🎉 **Achievements & Recognition**
|
|
||||||
|
|
||||||
### **🏆 Major Achievements:**
|
|
||||||
- **🎓 Advanced AI Teaching Plan**: 100% complete (3 phases, 6 sessions)
|
|
||||||
- **🤖 OpenClaw Agent Mastery**: Advanced AI specialists with real-world capabilities
|
|
||||||
- **📚 Perfect Documentation**: 10/10 quality score achieved
|
|
||||||
- **🚀 Production Ready**: Fully operational blockchain infrastructure
|
|
||||||
- **⚡ Advanced AI Operations**: Complex workflow orchestration, multi-model pipelines, resource optimization
|
|
||||||
|
|
||||||
### **🎯 Real-World Applications:**
|
|
||||||
- **🏥 Medical Diagnosis**: Complex AI pipelines with ensemble validation
|
|
||||||
- **📊 Customer Feedback Analysis**: Multi-modal processing with cross-modal attention
|
|
||||||
- **🚀 AI Service Provider**: Dynamic resource allocation and performance optimization
|
|
||||||
- **⛓️ Blockchain Operations**: Advanced multi-chain support with agent coordination
|
|
||||||
|
|
||||||
### **📊 Performance Metrics:**
|
|
||||||
- **AI Job Processing**: 100% functional with advanced job types
|
|
||||||
- **Resource Management**: Real-time allocation and monitoring
|
|
||||||
- **Cross-Node Coordination**: Smart contract messaging operational
|
|
||||||
- **Performance Optimization**: Sub-100ms inference with high utilization
|
|
||||||
- **Testing Coverage**: 91% success rate with comprehensive validation
|
|
||||||
|
|
||||||
### **🔮 Future Roadmap:**
|
|
||||||
- **📦 Modular Workflow Implementation**: Split large workflows into manageable modules
|
|
||||||
- **🤝 Enhanced Agent Coordination**: Advanced multi-agent communication patterns
|
|
||||||
- **🌐 Scalable Architectures**: Distributed decision making and scaling strategies
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 📄 **License**
|
|
||||||
|
|
||||||
This project is licensed under the **MIT License** - see the [LICENSE](LICENSE) file for details.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🆘 **Support & Help**
|
|
||||||
|
|
||||||
### **📚 Getting Help:**
|
|
||||||
- **📖 [Documentation](docs/README.md)** - Comprehensive guides
|
|
||||||
- **🤖 [OpenClaw Agent Documentation](docs/openclaw/OPENCLAW_AGENT_CAPABILITIES_ADVANCED.md)** - Advanced AI agent capabilities
|
|
||||||
- **💬 [Discord](https://discord.gg/aitbc)** - Community support
|
|
||||||
- **🐛 [Issues](https://github.com/oib/AITBC/issues)** - Report bugs
|
|
||||||
- **💡 [Discussions](https://github.com/oib/AITBC/discussions)** - Feature requests
|
|
||||||
|
|
||||||
### **📞 Contact & Connect:**
|
|
||||||
- **🌊 Windsurf**: [https://windsurf.com/refer?referral_code=4j75hl1x7ibz3yj8](https://windsurf.com/refer?referral_code=4j75hl1x7ibz3yj8)
|
|
||||||
- **🐦 X**: [@bubuIT_net](https://x.com/bubuIT_net)
|
|
||||||
- **📧 Email**: andreas.fleckl@bubuit.net
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🎯 **Roadmap**
|
|
||||||
|
|
||||||
### **🚀 Upcoming Features:**
|
|
||||||
- **🔮 Advanced AI Models**: Next-generation ML algorithms
|
|
||||||
- **🌐 Cross-Chain DeFi**: DeFi protocol integration
|
|
||||||
- **📱 Mobile Apps**: iOS and Android applications
|
|
||||||
- **🔮 Quantum Computing**: Quantum-resistant cryptography
|
|
||||||
- **🌍 Global Expansion**: Worldwide node deployment
|
|
||||||
|
|
||||||
### **📈 Development Phases:**
|
|
||||||
- **Phase 1**: Core infrastructure ✅ **COMPLETED**
|
|
||||||
- **Phase 2**: AI integration ✅ **COMPLETED**
|
|
||||||
- **Phase 3**: Exchange integration ✅ **COMPLETED**
|
|
||||||
- **Phase 4**: Ecosystem expansion 🔄 **IN PROGRESS**
|
|
||||||
- **Phase 5**: Global deployment 📋 **PLANNED**
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 📊 **Project Statistics**
|
|
||||||
|
|
||||||
### **📁 Repository Stats:**
|
|
||||||
- **Total Files**: 500+ files
|
|
||||||
- **Documentation**: Perfect 10/10 quality score
|
|
||||||
- **Test Coverage**: 100% (67/67 tests passing)
|
|
||||||
- **Languages**: Python, JavaScript, Solidity, Rust
|
|
||||||
- **Lines of Code**: 100,000+ lines
|
|
||||||
|
|
||||||
### **👥 Community Stats:**
|
|
||||||
- **Contributors**: 50+ developers
|
|
||||||
- **Stars**: 1,000+ GitHub stars
|
|
||||||
- **Forks**: 200+ forks
|
|
||||||
- **Issues**: 95% resolved
|
|
||||||
- **Pull Requests**: 300+ merged
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🎉 **Achievements**
|
|
||||||
|
|
||||||
### **🏆 Major Milestones:**
|
|
||||||
- **✅ Production Launch**: March 18, 2026
|
|
||||||
- **🎉 Perfect Documentation**: 10/10 quality score achieved
|
|
||||||
- **🤖 AI Integration**: Advanced ML models deployed
|
|
||||||
- **⛓️ Multi-Chain**: 7-layer architecture operational
|
|
||||||
- **🔒 Security**: Complete security framework
|
|
||||||
- **📚 Documentation**: World-class documentation system
|
|
||||||
|
|
||||||
### **🌟 Recognition:**
|
|
||||||
- **🏆 Best Documentation**: Perfect 10/10 quality score
|
|
||||||
- **🚀 Most Innovative**: AI-blockchain integration
|
|
||||||
- **🔒 Most Secure**: Comprehensive security framework
|
|
||||||
- **📚 Best Developer Experience**: Comprehensive CLI and tools
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🚀 **Get Started Now!**
|
|
||||||
|
|
||||||
**🎯 Ready to dive in?** Choose your path:
|
|
||||||
|
|
||||||
1. **👤 [I'm a User](docs/beginner/README.md)** - Start using AITBC
|
|
||||||
2. **👨💻 [I'm a Developer](docs/beginner/02_project/)** - Build on AITBC
|
|
||||||
3. **⛏️ [I'm a Miner](docs/beginner/04_miners/)** - Run mining operations
|
|
||||||
4. **🔧 [I'm an Admin](docs/beginner/05_cli/)** - Manage systems
|
|
||||||
5. **🎓 [I'm an Expert](docs/expert/README.md)** - Deep expertise
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**🎉 Welcome to AITBC - The Future of AI-Powered Blockchain!**
|
|
||||||
|
|
||||||
*Join us in revolutionizing the intersection of artificial intelligence and blockchain technology.*
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Last Updated**: 2026-03-26
|
|
||||||
**Version**: 0.2.2
|
|
||||||
**Quality Score**: 10/10 (Perfect)
|
|
||||||
**Status**: Production Ready
|
|
||||||
**License**: MIT
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
*🚀 AITBC - Building the future of AI and blockchain*
|
|
||||||
|
|||||||
152
SETUP.md
152
SETUP.md
@@ -1,152 +0,0 @@
|
|||||||
# AITBC Setup Guide
|
|
||||||
|
|
||||||
## Quick Setup (New Host)
|
|
||||||
|
|
||||||
Run this single command on any new host to install AITBC:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
sudo bash <(curl -sSL https://raw.githubusercontent.com/oib/aitbc/main/setup.sh)
|
|
||||||
```
|
|
||||||
|
|
||||||
Or clone and run manually:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
sudo git clone https://gitea.bubuit.net/oib/aitbc.git /opt/aitbc
|
|
||||||
cd /opt/aitbc
|
|
||||||
sudo chmod +x setup.sh
|
|
||||||
sudo ./setup.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
## What the Setup Script Does
|
|
||||||
|
|
||||||
1. **Prerequisites Check**
|
|
||||||
- Verifies Python 3.13.5+, pip3, git, systemd
|
|
||||||
- Checks for root privileges
|
|
||||||
|
|
||||||
2. **Repository Setup**
|
|
||||||
- Clones AITBC repository to `/opt/aitbc`
|
|
||||||
- Handles multiple repository URLs for reliability
|
|
||||||
|
|
||||||
3. **Virtual Environments**
|
|
||||||
- Creates Python venvs for each service
|
|
||||||
- Installs dependencies from `requirements.txt` when available
|
|
||||||
- Falls back to core dependencies if requirements missing
|
|
||||||
|
|
||||||
4. **Runtime Directories**
|
|
||||||
- Creates standard Linux directories:
|
|
||||||
- `/var/lib/aitbc/keystore/` - Blockchain keys
|
|
||||||
- `/var/lib/aitbc/data/` - Database files
|
|
||||||
- `/var/lib/aitbc/logs/` - Application logs
|
|
||||||
- `/etc/aitbc/` - Configuration files
|
|
||||||
- Sets proper permissions and ownership
|
|
||||||
|
|
||||||
5. **Systemd Services**
|
|
||||||
- Installs service files to `/etc/systemd/system/`
|
|
||||||
- Enables auto-start on boot
|
|
||||||
- Provides fallback manual startup
|
|
||||||
|
|
||||||
6. **Service Management**
|
|
||||||
- Creates `/opt/aitbc/start-services.sh` for manual control
|
|
||||||
- Creates `/opt/aitbc/health-check.sh` for monitoring
|
|
||||||
- Sets up logging to `/var/log/aitbc-*.log`
|
|
||||||
|
|
||||||
## Runtime Directories
|
|
||||||
|
|
||||||
AITBC uses standard Linux system directories for runtime data:
|
|
||||||
|
|
||||||
```
|
|
||||||
/var/lib/aitbc/
|
|
||||||
├── keystore/ # Blockchain private keys (700 permissions)
|
|
||||||
├── data/ # Database files (.db, .sqlite)
|
|
||||||
└── logs/ # Application logs
|
|
||||||
|
|
||||||
/etc/aitbc/ # Configuration files
|
|
||||||
/var/log/aitbc/ # System logging (symlink)
|
|
||||||
```
|
|
||||||
|
|
||||||
### Security Notes
|
|
||||||
- **Keystore**: Restricted to root/aitbc user only
|
|
||||||
- **Data**: Writable by services, readable by admin
|
|
||||||
- **Logs**: Rotated automatically by logrotate
|
|
||||||
|
|
||||||
## Service Endpoints
|
|
||||||
|
|
||||||
| Service | Port | Health Endpoint |
|
|
||||||
|---------|------|----------------|
|
|
||||||
| Wallet API | 8003 | `http://localhost:8003/health` |
|
|
||||||
| Exchange API | 8001 | `http://localhost:8001/api/health` |
|
|
||||||
| Coordinator API | 8000 | `http://localhost:8000/health` |
|
|
||||||
| Blockchain RPC | 8545 | `http://localhost:8545` |
|
|
||||||
|
|
||||||
## Management Commands
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Check service health
|
|
||||||
/opt/aitbc/health-check.sh
|
|
||||||
|
|
||||||
# Restart all services
|
|
||||||
/opt/aitbc/start-services.sh
|
|
||||||
|
|
||||||
# View logs (new standard locations)
|
|
||||||
tail -f /var/lib/aitbc/logs/aitbc-wallet.log
|
|
||||||
tail -f /var/lib/aitbc/logs/aitbc-coordinator.log
|
|
||||||
tail -f /var/lib/aitbc/logs/aitbc-exchange.log
|
|
||||||
|
|
||||||
# Check keystore
|
|
||||||
ls -la /var/lib/aitbc/keystore/
|
|
||||||
|
|
||||||
# Systemd control
|
|
||||||
systemctl status aitbc-wallet
|
|
||||||
systemctl restart aitbc-coordinator-api
|
|
||||||
systemctl stop aitbc-exchange-api
|
|
||||||
```
|
|
||||||
|
|
||||||
## Troubleshooting
|
|
||||||
|
|
||||||
### Services Not Starting
|
|
||||||
1. Check logs: `tail -f /var/lib/aitbc/logs/aitbc-*.log`
|
|
||||||
2. Verify ports: `netstat -tlnp | grep ':800'`
|
|
||||||
3. Check processes: `ps aux | grep python`
|
|
||||||
4. Verify runtime directories: `ls -la /var/lib/aitbc/`
|
|
||||||
|
|
||||||
### Missing Dependencies
|
|
||||||
The setup script handles missing `requirements.txt` files by installing core dependencies:
|
|
||||||
- fastapi
|
|
||||||
- uvicorn
|
|
||||||
- pydantic
|
|
||||||
- httpx
|
|
||||||
- python-dotenv
|
|
||||||
|
|
||||||
### Port Conflicts
|
|
||||||
Services use these default ports. If conflicts exist:
|
|
||||||
1. Kill conflicting processes: `kill <pid>`
|
|
||||||
2. Modify service files to use different ports
|
|
||||||
3. Restart services
|
|
||||||
|
|
||||||
## Development Mode
|
|
||||||
|
|
||||||
For development with manual control:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd /opt/aitbc/apps/wallet
|
|
||||||
source .venv/bin/activate
|
|
||||||
python simple_daemon.py
|
|
||||||
|
|
||||||
cd /opt/aitbc/apps/exchange
|
|
||||||
source .venv/bin/activate
|
|
||||||
python simple_exchange_api.py
|
|
||||||
|
|
||||||
cd /opt/aitbc/apps/coordinator-api/src
|
|
||||||
source ../.venv/bin/activate
|
|
||||||
python -m uvicorn app.main:app --host 0.0.0.0 --port 8000
|
|
||||||
```
|
|
||||||
|
|
||||||
## Production Considerations
|
|
||||||
|
|
||||||
For production deployment:
|
|
||||||
1. Configure proper environment variables
|
|
||||||
2. Set up reverse proxy (nginx)
|
|
||||||
3. Configure SSL certificates
|
|
||||||
4. Set up log rotation
|
|
||||||
5. Configure monitoring and alerts
|
|
||||||
6. Use proper database setup (PostgreSQL/Redis)
|
|
||||||
@@ -1,229 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Integration Layer
|
|
||||||
Connects agent protocols to existing AITBC services
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import aiohttp
|
|
||||||
import json
|
|
||||||
from typing import Dict, Any, List, Optional
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
class AITBCServiceIntegration:
    """Async HTTP integration layer for the locally running AITBC services.

    Wraps the HTTP endpoints of the coordinator API, blockchain RPC,
    exchange, marketplace and agent registry. Use as an async context
    manager so the underlying aiohttp session is opened and closed
    deterministically::

        async with AITBCServiceIntegration() as integration:
            info = await integration.get_blockchain_info()

    Public methods never raise: failures are reported as a dict with an
    "error" key and a "status" of "unavailable" (read-only status probes)
    or "failed" (actions), matching what callers check for.
    """

    def __init__(self):
        # Base URLs of the local services this layer talks to.
        self.service_endpoints = {
            "coordinator_api": "http://localhost:8000",
            "blockchain_rpc": "http://localhost:8006",
            "exchange_service": "http://localhost:8001",
            "marketplace": "http://localhost:8002",
            "agent_registry": "http://localhost:8013"
        }
        # Created lazily in __aenter__; None while outside the context.
        self.session = None

    async def __aenter__(self):
        self.session = aiohttp.ClientSession()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        if self.session:
            await self.session.close()

    async def _get_json(self, url: str, error_status: str) -> Dict[str, Any]:
        """GET *url* and decode the JSON body.

        On any failure, return ``{"error": ..., "status": error_status}``
        instead of raising, so callers get a uniform shape.
        """
        try:
            async with self.session.get(url) as response:
                return await response.json()
        except Exception as e:
            return {"error": str(e), "status": error_status}

    async def _post_json(self, url: str, payload: Dict[str, Any],
                         error_status: str) -> Dict[str, Any]:
        """POST *payload* as JSON to *url*; same error contract as _get_json."""
        try:
            async with self.session.post(url, json=payload) as response:
                return await response.json()
        except Exception as e:
            return {"error": str(e), "status": error_status}

    async def get_blockchain_info(self) -> Dict[str, Any]:
        """Get blockchain information (the node's /health payload)."""
        return await self._get_json(
            f"{self.service_endpoints['blockchain_rpc']}/health", "unavailable")

    async def get_exchange_status(self) -> Dict[str, Any]:
        """Get exchange service status (/api/health payload)."""
        return await self._get_json(
            f"{self.service_endpoints['exchange_service']}/api/health", "unavailable")

    async def get_coordinator_status(self) -> Dict[str, Any]:
        """Get coordinator API status (/health payload)."""
        return await self._get_json(
            f"{self.service_endpoints['coordinator_api']}/health", "unavailable")

    async def submit_transaction(self, transaction_data: Dict[str, Any]) -> Dict[str, Any]:
        """Submit transaction to the blockchain RPC submit endpoint."""
        return await self._post_json(
            f"{self.service_endpoints['blockchain_rpc']}/rpc/submit",
            transaction_data, "failed")

    async def get_market_data(self, symbol: str = "AITBC/BTC") -> Dict[str, Any]:
        """Get market data for *symbol* from the exchange service."""
        return await self._get_json(
            f"{self.service_endpoints['exchange_service']}/api/market/{symbol}", "failed")

    async def register_agent_with_coordinator(self, agent_data: Dict[str, Any]) -> Dict[str, Any]:
        """Register an agent with the agent registry service."""
        return await self._post_json(
            f"{self.service_endpoints['agent_registry']}/api/agents/register",
            agent_data, "failed")
|
|
||||||
|
|
||||||
class AgentServiceBridge:
    """Bridge between agents and AITBC services.

    Tracks started agents in-memory (``active_agents``) and routes their
    tasks to the appropriate service via AITBCServiceIntegration.
    """

    def __init__(self):
        self.integration = AITBCServiceIntegration()
        # agent_id -> {"config": ..., "registration": ..., "started_at": ...}
        self.active_agents = {}

    async def start_agent(self, agent_id: str, agent_config: Dict[str, Any]) -> bool:
        """Register *agent_id* with the coordinator and track it locally.

        Returns True on successful registration, False otherwise.
        """
        try:
            async with self.integration as integration:
                fallback_endpoint = f"http://localhost:{8000 + len(self.active_agents) + 10}"
                registration_result = await integration.register_agent_with_coordinator({
                    "name": agent_id,
                    "type": agent_config.get("type", "generic"),
                    "capabilities": agent_config.get("capabilities", []),
                    "chain_id": agent_config.get("chain_id", "ait-mainnet"),
                    "endpoint": agent_config.get("endpoint", fallback_endpoint)
                })

                # The registry returns the created agent dict on success,
                # not a {"status": "ok"} wrapper.
                if not (registration_result and "id" in registration_result):
                    print(f"Registration failed: {registration_result}")
                    return False

                self.active_agents[agent_id] = {
                    "config": agent_config,
                    "registration": registration_result,
                    "started_at": datetime.utcnow()
                }
                return True
        except Exception as e:
            print(f"Failed to start agent {agent_id}: {e}")
            return False

    async def stop_agent(self, agent_id: str) -> bool:
        """Forget a tracked agent; returns True if it was known."""
        if agent_id not in self.active_agents:
            return False
        del self.active_agents[agent_id]
        return True

    async def get_agent_status(self, agent_id: str) -> Dict[str, Any]:
        """Agent status combined with the health of the backing services."""
        if agent_id not in self.active_agents:
            return {"status": "not_found"}

        agent_info = self.active_agents[agent_id]

        async with self.integration as integration:
            blockchain_status = await integration.get_blockchain_info()
            exchange_status = await integration.get_exchange_status()
            coordinator_status = await integration.get_coordinator_status()

        return {
            "agent_id": agent_id,
            "status": "active",
            "started_at": agent_info["started_at"].isoformat(),
            "services": {
                "blockchain": blockchain_status,
                "exchange": exchange_status,
                "coordinator": coordinator_status
            }
        }

    async def execute_agent_task(self, agent_id: str, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Dispatch a task to the handler matching ``task_data["type"]``."""
        if agent_id not in self.active_agents:
            return {"status": "error", "message": "Agent not found"}

        task_type = task_data.get("type")
        handlers = {
            "market_analysis": self._execute_market_analysis,
            "trading": self._execute_trading_task,
            "compliance_check": self._execute_compliance_check,
        }
        handler = handlers.get(task_type)
        if handler is None:
            return {"status": "error", "message": f"Unknown task type: {task_type}"}
        return await handler(task_data)

    async def _execute_market_analysis(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Fetch market data and attach a (currently static) analysis."""
        try:
            symbol = task_data.get("symbol", "AITBC/BTC")
            async with self.integration as integration:
                market_data = await integration.get_market_data(symbol)

            analysis_result = {
                "symbol": symbol,
                "market_data": market_data,
                # Placeholder analysis values; no real model is consulted here.
                "analysis": {
                    "trend": "neutral",
                    "volatility": "medium",
                    "recommendation": "hold"
                },
                "timestamp": datetime.utcnow().isoformat()
            }
            return {"status": "success", "result": analysis_result}
        except Exception as e:
            return {"status": "error", "message": str(e)}

    async def _execute_trading_task(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Build a trade transaction from market data and submit it."""
        try:
            symbol = task_data.get("symbol", "AITBC/BTC")
            async with self.integration as integration:
                # Get market data first so a missing price can fall back to it.
                market_data = await integration.get_market_data(symbol)

                transaction = {
                    "type": "trade",
                    "symbol": symbol,
                    "side": task_data.get("side", "buy"),
                    "amount": task_data.get("amount", 0.1),
                    "price": task_data.get("price", market_data.get("price", 0.001))
                }

                tx_result = await integration.submit_transaction(transaction)

            return {"status": "success", "transaction": tx_result}
        except Exception as e:
            return {"status": "error", "message": str(e)}

    async def _execute_compliance_check(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Run a basic (stubbed) compliance check; always reports 'passed'."""
        try:
            compliance_result = {
                "user_id": task_data.get("user_id"),
                "check_type": task_data.get("check_type", "basic"),
                "status": "passed",
                "checks_performed": ["kyc", "aml", "sanctions"],
                "timestamp": datetime.utcnow().isoformat()
            }
            return {"status": "success", "result": compliance_result}
        except Exception as e:
            return {"status": "error", "message": str(e)}
|
|
||||||
@@ -1,149 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Compliance Agent
|
|
||||||
Automated compliance and regulatory monitoring agent
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
from typing import Dict, Any, List
|
|
||||||
from datetime import datetime
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
# Add parent directory to path
|
|
||||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
|
||||||
|
|
||||||
from apps.agent_services.agent_bridge.src.integration_layer import AgentServiceBridge
|
|
||||||
|
|
||||||
class ComplianceAgent:
    """Automated compliance agent.

    Periodically sweeps a configured list of entities, runs compliance
    checks through the AgentServiceBridge, and escalates failures as
    alerts.
    """

    def __init__(self, agent_id: str, config: Dict[str, Any]):
        self.agent_id = agent_id
        self.config = config
        self.bridge = AgentServiceBridge()
        self.is_running = False
        # Seconds between monitoring sweeps; default 5 minutes.
        self.check_interval = config.get("check_interval", 300)
        self.monitored_entities = config.get("monitored_entities", [])

    async def start(self) -> bool:
        """Register this agent with the bridge; True on success."""
        try:
            started = await self.bridge.start_agent(self.agent_id, {
                "type": "compliance",
                "capabilities": ["kyc_check", "aml_screening", "regulatory_reporting"],
                "endpoint": f"http://localhost:8006"
            })
            if not started:
                print(f"Failed to start compliance agent {self.agent_id}")
                return False
            self.is_running = True
            print(f"Compliance agent {self.agent_id} started successfully")
            return True
        except Exception as e:
            print(f"Error starting compliance agent: {e}")
            return False

    async def stop(self) -> bool:
        """Halt the monitoring loop and deregister from the bridge."""
        self.is_running = False
        stopped = await self.bridge.stop_agent(self.agent_id)
        if stopped:
            print(f"Compliance agent {self.agent_id} stopped successfully")
        return stopped

    async def run_compliance_loop(self):
        """Main compliance monitoring loop: check every entity, then sleep."""
        while self.is_running:
            try:
                for entity in self.monitored_entities:
                    await self._perform_compliance_check(entity)
                await asyncio.sleep(self.check_interval)
            except Exception as e:
                print(f"Error in compliance loop: {e}")
                await asyncio.sleep(30)  # Wait before retrying

    async def _perform_compliance_check(self, entity_id: str) -> None:
        """Run one full compliance check for *entity_id* via the bridge."""
        try:
            compliance_task = {
                "type": "compliance_check",
                "user_id": entity_id,
                "check_type": "full",
                "monitored_activities": ["trading", "transfers", "wallet_creation"]
            }
            outcome = await self.bridge.execute_agent_task(self.agent_id, compliance_task)
            if outcome.get("status") == "success":
                await self._handle_compliance_result(entity_id, outcome["result"])
            else:
                print(f"Compliance check failed for {entity_id}: {outcome}")
        except Exception as e:
            print(f"Error performing compliance check for {entity_id}: {e}")

    async def _handle_compliance_result(self, entity_id: str, result: Dict[str, Any]) -> None:
        """React to a completed compliance check; alert on failure."""
        status = result.get("status", "unknown")
        if status == "passed":
            print(f"✅ Compliance check passed for {entity_id}")
        elif status == "failed":
            print(f"❌ Compliance check failed for {entity_id}")
            # Trigger alert or further investigation
            await self._trigger_compliance_alert(entity_id, result)
        else:
            print(f"⚠️ Compliance check inconclusive for {entity_id}")

    async def _trigger_compliance_alert(self, entity_id: str, result: Dict[str, Any]) -> None:
        """Emit a high-severity alert for a failed compliance check."""
        alert_data = {
            "entity_id": entity_id,
            "alert_type": "compliance_failure",
            "severity": "high",
            "details": result,
            "timestamp": datetime.utcnow().isoformat()
        }
        # In a real implementation, this would send to alert system
        print(f"🚨 COMPLIANCE ALERT: {json.dumps(alert_data, indent=2)}")

    async def get_status(self) -> Dict[str, Any]:
        """Bridge-reported status augmented with this agent's config."""
        status = await self.bridge.get_agent_status(self.agent_id)
        status["monitored_entities"] = len(self.monitored_entities)
        status["check_interval"] = self.check_interval
        return status
|
|
||||||
|
|
||||||
# Main execution
async def main():
    """Create a demo compliance agent and run it until interrupted."""
    agent_id = "compliance-agent-001"
    config = {
        "check_interval": 60,  # 1 minute for testing
        "monitored_entities": ["user001", "user002", "user003"]
    }

    agent = ComplianceAgent(agent_id, config)

    # Start agent; bail out early if registration did not succeed.
    if not await agent.start():
        print("Failed to start compliance agent")
        return

    try:
        # Run compliance loop
        await agent.run_compliance_loop()
    except KeyboardInterrupt:
        print("Shutting down compliance agent...")
    finally:
        await agent.stop()


if __name__ == "__main__":
    asyncio.run(main())
|
|
||||||
@@ -1,132 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Coordinator Service
|
|
||||||
Agent task coordination and management
|
|
||||||
"""
|
|
||||||
|
|
||||||
from fastapi import FastAPI, HTTPException
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from typing import List, Optional, Dict, Any
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
import sqlite3
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from contextlib import asynccontextmanager
|
|
||||||
|
|
||||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: create tables on startup; nothing to tear down."""
    init_db()
    yield
|
|
||||||
app = FastAPI(title="AITBC Agent Coordinator API", version="1.0.0", lifespan=lifespan)
|
|
||||||
|
|
||||||
# Database setup
|
|
||||||
def get_db():
    """Open the coordinator SQLite database with dict-like row access."""
    connection = sqlite3.connect('agent_coordinator.db')
    # sqlite3.Row lets callers index rows by column name.
    connection.row_factory = sqlite3.Row
    return connection
|
|
||||||
@contextmanager
def get_db_connection():
    """Yield a database connection and guarantee it is closed afterwards."""
    connection = get_db()
    try:
        yield connection
    finally:
        connection.close()
|
|
||||||
# Initialize database
|
|
||||||
def init_db():
    """Create the tasks table on first run (no-op if it already exists)."""
    with get_db_connection() as conn:
        conn.execute('''
            CREATE TABLE IF NOT EXISTS tasks (
                id TEXT PRIMARY KEY,
                task_type TEXT NOT NULL,
                payload TEXT NOT NULL,
                required_capabilities TEXT NOT NULL,
                priority TEXT NOT NULL,
                status TEXT NOT NULL,
                assigned_agent_id TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                result TEXT
            )
        ''')
|
|
||||||
# Models
|
|
||||||
class Task(BaseModel):
    """A coordinator task as stored in, and returned from, the tasks table."""

    id: str
    task_type: str
    payload: Dict[str, Any]
    required_capabilities: List[str]
    priority: str
    status: str
    # Unset until an agent claims the task.
    assigned_agent_id: Optional[str] = None
|
|
||||||
class TaskCreation(BaseModel):
    """Request body for creating a task; the server assigns id and status."""

    task_type: str
    payload: Dict[str, Any]
    required_capabilities: List[str]
    priority: str = "normal"
|
|
||||||
# API Endpoints
|
|
||||||
|
|
||||||
@app.post("/api/tasks", response_model=Task)
async def create_task(task: TaskCreation):
    """Create a new task.

    Persists the task with status "pending" and returns the stored
    representation. `payload` and `required_capabilities` are JSON-encoded
    for storage.
    """
    task_id = str(uuid.uuid4())

    with get_db_connection() as conn:
        conn.execute('''
            INSERT INTO tasks (id, task_type, payload, required_capabilities, priority, status)
            VALUES (?, ?, ?, ?, ?, ?)
        ''', (
            task_id, task.task_type, json.dumps(task.payload),
            json.dumps(task.required_capabilities), task.priority, "pending"
        ))
        # BUG FIX: sqlite3 wraps DML in an implicit transaction; without an
        # explicit commit the INSERT was rolled back when the connection
        # closed, so created tasks never persisted. (The registry service's
        # register_agent already commits for the same reason.)
        conn.commit()

    return Task(
        id=task_id,
        task_type=task.task_type,
        payload=task.payload,
        required_capabilities=task.required_capabilities,
        priority=task.priority,
        status="pending"
    )
|
|
||||||
@app.get("/api/tasks", response_model=List[Task])
async def list_tasks(status: Optional[str] = None):
    """List tasks, optionally restricted to a single status value."""
    # Build the query before touching the database.
    query = "SELECT * FROM tasks"
    params = []
    if status:
        query += " WHERE status = ?"
        params.append(status)

    with get_db_connection() as conn:
        rows = conn.execute(query, params).fetchall()

    # Decode the JSON-encoded columns back into Python structures.
    return [
        Task(
            id=row["id"],
            task_type=row["task_type"],
            payload=json.loads(row["payload"]),
            required_capabilities=json.loads(row["required_capabilities"]),
            priority=row["priority"],
            status=row["status"],
            assigned_agent_id=row["assigned_agent_id"]
        )
        for row in rows
    ]
|
|
||||||
@app.get("/api/health")
async def health_check():
    """Liveness probe: report service health and the current UTC time."""
    now = datetime.utcnow()
    return {"status": "ok", "timestamp": now}
|
|
||||||
if __name__ == "__main__":
    # Serve the coordinator API directly when run as a script.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8012)
@@ -1,19 +0,0 @@
|
|||||||
# AITBC Agent Protocols Environment Configuration
|
|
||||||
# Copy this file to .env and update with your secure values
|
|
||||||
|
|
||||||
# Agent Protocol Encryption Key (generate a strong, unique key)
|
|
||||||
AITBC_AGENT_PROTOCOL_KEY=your-secure-encryption-key-here
|
|
||||||
|
|
||||||
# Agent Protocol Salt (generate a unique salt value)
|
|
||||||
AITBC_AGENT_PROTOCOL_SALT=your-unique-salt-value-here
|
|
||||||
|
|
||||||
# Agent Registry Configuration
|
|
||||||
AGENT_REGISTRY_HOST=0.0.0.0
|
|
||||||
AGENT_REGISTRY_PORT=8003
|
|
||||||
|
|
||||||
# Database Configuration
|
|
||||||
AGENT_REGISTRY_DB_PATH=agent_registry.db
|
|
||||||
|
|
||||||
# Security Settings
|
|
||||||
AGENT_PROTOCOL_TIMEOUT=300
|
|
||||||
AGENT_PROTOCOL_MAX_RETRIES=3
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
"""
|
|
||||||
Agent Protocols Package
|
|
||||||
"""
|
|
||||||
|
|
||||||
from .message_protocol import MessageProtocol, MessageTypes, AgentMessageClient
|
|
||||||
from .task_manager import TaskManager, TaskStatus, TaskPriority, Task
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"MessageProtocol",
|
|
||||||
"MessageTypes",
|
|
||||||
"AgentMessageClient",
|
|
||||||
"TaskManager",
|
|
||||||
"TaskStatus",
|
|
||||||
"TaskPriority",
|
|
||||||
"Task"
|
|
||||||
]
|
|
||||||
@@ -1,113 +0,0 @@
|
|||||||
"""
|
|
||||||
Message Protocol for AITBC Agents
|
|
||||||
Handles message creation, routing, and delivery between agents
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Dict, Any, Optional, List
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
class MessageTypes(Enum):
    """Kinds of inter-agent messages supported by the protocol."""

    TASK_REQUEST = "task_request"
    TASK_RESPONSE = "task_response"
    HEARTBEAT = "heartbeat"
    STATUS_UPDATE = "status_update"
    ERROR = "error"
    DATA = "data"
|
|
||||||
class MessageProtocol:
    """In-memory message store and delivery tracker for agent communication.

    Messages move through the states pending -> sent -> received; every
    message ever created is retained in ``self.messages``.
    """

    def __init__(self):
        self.messages = []          # all messages, in creation order
        self.message_handlers = {}  # reserved for per-type handler callbacks

    def create_message(
        self,
        sender_id: str,
        receiver_id: str,
        message_type: MessageTypes,
        content: Dict[str, Any],
        message_id: Optional[str] = None
    ) -> Dict[str, Any]:
        """Build, record, and return a new message envelope (status "pending")."""
        if message_id is None:
            message_id = str(uuid.uuid4())

        envelope = {
            "message_id": message_id,
            "sender_id": sender_id,
            "receiver_id": receiver_id,
            "message_type": message_type.value,
            "content": content,
            "timestamp": datetime.utcnow().isoformat(),
            "status": "pending",
        }
        self.messages.append(envelope)
        return envelope

    def send_message(self, message: Dict[str, Any]) -> bool:
        """Mark *message* as sent; return False when stamping it fails."""
        try:
            message["status"] = "sent"
            message["sent_timestamp"] = datetime.utcnow().isoformat()
        except Exception:
            message["status"] = "failed"
            return False
        return True

    def receive_message(self, message_id: str) -> Optional[Dict[str, Any]]:
        """Mark the matching message as received and return it, else None."""
        match = next(
            (m for m in self.messages if m["message_id"] == message_id),
            None,
        )
        if match is not None:
            match["status"] = "received"
            match["received_timestamp"] = datetime.utcnow().isoformat()
        return match

    def get_messages_by_agent(self, agent_id: str) -> List[Dict[str, Any]]:
        """Return every message sent or received by *agent_id*."""
        return [
            m for m in self.messages
            if agent_id in (m["sender_id"], m["receiver_id"])
        ]
|
|
||||||
class AgentMessageClient:
    """Convenience wrapper binding one agent id to a MessageProtocol."""

    def __init__(self, agent_id: str, protocol: MessageProtocol):
        self.agent_id = agent_id
        self.protocol = protocol
        self.received_messages = []  # messages already delivered to this client

    def send_message(
        self,
        receiver_id: str,
        message_type: MessageTypes,
        content: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Create and dispatch a message from this agent to *receiver_id*."""
        outgoing = self.protocol.create_message(
            sender_id=self.agent_id,
            receiver_id=receiver_id,
            message_type=message_type,
            content=content
        )
        self.protocol.send_message(outgoing)
        return outgoing

    def receive_messages(self) -> List[Dict[str, Any]]:
        """Deliver all sent-but-unseen messages addressed to this agent."""
        delivered = []
        for candidate in self.protocol.messages:
            addressed_here = candidate["receiver_id"] == self.agent_id
            if (addressed_here
                    and candidate["status"] == "sent"
                    and candidate not in self.received_messages):
                self.protocol.receive_message(candidate["message_id"])
                self.received_messages.append(candidate)
                delivered.append(candidate)
        return delivered
@@ -1,128 +0,0 @@
|
|||||||
"""
|
|
||||||
Task Manager for AITBC Agents
|
|
||||||
Handles task creation, assignment, and tracking
|
|
||||||
"""
|
|
||||||
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
from typing import Dict, Any, Optional, List
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
class TaskStatus(Enum):
    """Lifecycle states a task can be in."""

    PENDING = "pending"
    IN_PROGRESS = "in_progress"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"
|
|
||||||
class TaskPriority(Enum):
    """Relative urgency of a task, lowest to highest."""

    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    URGENT = "urgent"
|
|
||||||
class Task:
    """A unit of work assigned to an agent, with lifecycle bookkeeping."""

    def __init__(
        self,
        task_id: str,
        title: str,
        description: str,
        assigned_to: str,
        priority: TaskPriority = TaskPriority.MEDIUM,
        created_by: Optional[str] = None
    ):
        self.task_id = task_id
        self.title = title
        self.description = description
        self.assigned_to = assigned_to
        self.priority = priority
        # When no creator is given, treat the assignee as the creator.
        self.created_by = created_by or assigned_to
        self.status = TaskStatus.PENDING
        self.created_at = datetime.utcnow()
        self.updated_at = datetime.utcnow()
        self.completed_at = None  # set when status becomes COMPLETED
        self.result = None        # payload recorded on completion
        self.error = None         # message recorded on failure
|
|
||||||
class TaskManager:
    """Registry of tasks keyed by id, with status transitions and queries."""

    def __init__(self):
        self.tasks = {}         # task_id -> Task
        self.task_history = []  # reserved for archiving finished tasks

    def create_task(
        self,
        title: str,
        description: str,
        assigned_to: str,
        priority: TaskPriority = TaskPriority.MEDIUM,
        created_by: Optional[str] = None
    ) -> Task:
        """Create, register, and return a new Task with a random id."""
        new_task = Task(
            task_id=str(uuid.uuid4()),
            title=title,
            description=description,
            assigned_to=assigned_to,
            priority=priority,
            created_by=created_by
        )
        self.tasks[new_task.task_id] = new_task
        return new_task

    def get_task(self, task_id: str) -> Optional[Task]:
        """Look up a task by id; None when unknown."""
        return self.tasks.get(task_id)

    def update_task_status(
        self,
        task_id: str,
        status: TaskStatus,
        result: Optional[Dict[str, Any]] = None,
        error: Optional[str] = None
    ) -> bool:
        """Transition a task to *status*, recording result or error.

        Returns False when the task id is unknown.
        """
        task = self.get_task(task_id)
        if task is None:
            return False

        task.status = status
        task.updated_at = datetime.utcnow()

        if status == TaskStatus.COMPLETED:
            task.completed_at = datetime.utcnow()
            task.result = result
        elif status == TaskStatus.FAILED:
            task.error = error
        return True

    def get_tasks_by_agent(self, agent_id: str) -> List[Task]:
        """All tasks assigned to *agent_id*."""
        return [t for t in self.tasks.values() if t.assigned_to == agent_id]

    def get_tasks_by_status(self, status: TaskStatus) -> List[Task]:
        """All tasks currently in *status*."""
        return [t for t in self.tasks.values() if t.status == status]

    def get_overdue_tasks(self, hours: int = 24) -> List[Task]:
        """Unfinished tasks created more than *hours* hours ago."""
        cutoff = datetime.utcnow() - timedelta(hours=hours)
        open_states = (TaskStatus.PENDING, TaskStatus.IN_PROGRESS)
        return [
            t for t in self.tasks.values()
            if t.status in open_states and t.created_at < cutoff
        ]
@@ -1,151 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Registry Service
|
|
||||||
Central agent discovery and registration system
|
|
||||||
"""
|
|
||||||
|
|
||||||
from fastapi import FastAPI, HTTPException, Depends
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from typing import List, Optional, Dict, Any
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
import sqlite3
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from contextlib import asynccontextmanager
|
|
||||||
|
|
||||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: create tables on startup; nothing to tear down."""
    init_db()
    yield
|
|
||||||
app = FastAPI(title="AITBC Agent Registry API", version="1.0.0", lifespan=lifespan)
|
|
||||||
|
|
||||||
# Database setup
|
|
||||||
def get_db():
    """Open the agent-registry SQLite database with dict-like row access."""
    connection = sqlite3.connect('agent_registry.db')
    # sqlite3.Row lets callers index rows by column name.
    connection.row_factory = sqlite3.Row
    return connection
|
|
||||||
@contextmanager
def get_db_connection():
    """Yield a database connection and guarantee it is closed afterwards."""
    connection = get_db()
    try:
        yield connection
    finally:
        connection.close()
|
|
||||||
# Initialize database
|
|
||||||
def init_db():
    """Create the agents table on first run (no-op if it already exists)."""
    with get_db_connection() as conn:
        conn.execute('''
            CREATE TABLE IF NOT EXISTS agents (
                id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                type TEXT NOT NULL,
                capabilities TEXT NOT NULL,
                chain_id TEXT NOT NULL,
                endpoint TEXT NOT NULL,
                status TEXT DEFAULT 'active',
                last_heartbeat TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                metadata TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''')
|
|
||||||
# Models
|
|
||||||
class Agent(BaseModel):
    """A registered agent as stored in, and returned from, the registry."""

    id: str
    name: str
    type: str
    capabilities: List[str]
    chain_id: str
    endpoint: str
    # Pydantic deep-copies field defaults, so the shared {} literal is safe here.
    metadata: Optional[Dict[str, Any]] = {}
|
|
||||||
class AgentRegistration(BaseModel):
    """Request body for registering an agent; the server assigns the id."""

    name: str
    type: str
    capabilities: List[str]
    chain_id: str
    endpoint: str
    # Pydantic deep-copies field defaults, so the shared {} literal is safe here.
    metadata: Optional[Dict[str, Any]] = {}
|
|
||||||
# API Endpoints
|
|
||||||
|
|
||||||
@app.post("/api/agents/register", response_model=Agent)
async def register_agent(agent: AgentRegistration):
    """Register a new agent and return its stored representation."""
    agent_id = str(uuid.uuid4())

    with get_db_connection() as conn:
        conn.execute('''
            INSERT INTO agents (id, name, type, capabilities, chain_id, endpoint, metadata)
            VALUES (?, ?, ?, ?, ?, ?, ?)
        ''', (
            agent_id, agent.name, agent.type,
            json.dumps(agent.capabilities), agent.chain_id,
            agent.endpoint, json.dumps(agent.metadata)
        ))
        # Persist before the connection closes: sqlite3 does not autocommit DML.
        conn.commit()

    return Agent(
        id=agent_id,
        name=agent.name,
        type=agent.type,
        capabilities=agent.capabilities,
        chain_id=agent.chain_id,
        endpoint=agent.endpoint,
        metadata=agent.metadata
    )
|
|
||||||
@app.get("/api/agents", response_model=List[Agent])
async def list_agents(
    agent_type: Optional[str] = None,
    chain_id: Optional[str] = None,
    capability: Optional[str] = None
):
    """List active agents, optionally filtered by type, chain, or capability."""
    # Build the query incrementally from the supplied filters.
    query = "SELECT * FROM agents WHERE status = 'active'"
    params = []

    if agent_type:
        query += " AND type = ?"
        params.append(agent_type)
    if chain_id:
        query += " AND chain_id = ?"
        params.append(chain_id)
    if capability:
        # NOTE(review): LIKE does substring matching on the stored JSON text,
        # so "trade" also matches "trading" — confirm this is intended.
        query += " AND capabilities LIKE ?"
        params.append(f'%{capability}%')

    with get_db_connection() as conn:
        rows = conn.execute(query, params).fetchall()

    # Decode JSON-encoded columns back into Python structures.
    return [
        Agent(
            id=row["id"],
            name=row["name"],
            type=row["type"],
            capabilities=json.loads(row["capabilities"]),
            chain_id=row["chain_id"],
            endpoint=row["endpoint"],
            metadata=json.loads(row["metadata"] or "{}")
        )
        for row in rows
    ]
|
|
||||||
@app.get("/api/health")
async def health_check():
    """Liveness probe: report service health and the current UTC time."""
    now = datetime.utcnow()
    return {"status": "ok", "timestamp": now}
|
|
||||||
if __name__ == "__main__":
    # Serve the registry API directly when run as a script.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8013)
@@ -1,166 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Trading Agent
|
|
||||||
Automated trading agent for AITBC marketplace
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
from typing import Dict, Any, List
|
|
||||||
from datetime import datetime
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
# Add parent directory to path
|
|
||||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
|
||||||
|
|
||||||
from apps.agent_services.agent_bridge.src.integration_layer import AgentServiceBridge
|
|
||||||
|
|
||||||
class TradingAgent:
    """Automated trading agent.

    Registers itself with the AgentServiceBridge, then repeatedly runs
    market analysis per configured symbol and submits buy/sell orders
    when the analysis recommends them.
    """

    def __init__(self, agent_id: str, config: Dict[str, Any]):
        self.agent_id = agent_id
        self.config = config
        self.bridge = AgentServiceBridge()
        self.is_running = False
        self.trading_strategy = config.get("strategy", "basic")
        self.symbols = config.get("symbols", ["AITBC/BTC"])
        self.trade_interval = config.get("trade_interval", 60)  # seconds

    async def start(self) -> bool:
        """Register with the service bridge; True on success."""
        try:
            success = await self.bridge.start_agent(self.agent_id, {
                "type": "trading",
                "capabilities": ["market_analysis", "trading", "risk_management"],
                "endpoint": f"http://localhost:8005"
            })

            if success:
                self.is_running = True
                print(f"Trading agent {self.agent_id} started successfully")
                return True
            else:
                print(f"Failed to start trading agent {self.agent_id}")
                return False
        except Exception as e:
            print(f"Error starting trading agent: {e}")
            return False

    async def stop(self) -> bool:
        """Stop the trading loop and deregister from the bridge."""
        self.is_running = False
        success = await self.bridge.stop_agent(self.agent_id)
        if success:
            print(f"Trading agent {self.agent_id} stopped successfully")
        return success

    async def run_trading_loop(self):
        """Main loop: analyze each symbol, then sleep for trade_interval."""
        while self.is_running:
            try:
                for symbol in self.symbols:
                    await self._analyze_and_trade(symbol)

                await asyncio.sleep(self.trade_interval)
            except Exception as e:
                print(f"Error in trading loop: {e}")
                await asyncio.sleep(10)  # Wait before retrying

    async def _analyze_and_trade(self, symbol: str) -> None:
        """Run market analysis for *symbol* and trade on the result."""
        try:
            analysis_task = {
                "type": "market_analysis",
                "symbol": symbol,
                "strategy": self.trading_strategy
            }

            analysis_result = await self.bridge.execute_agent_task(self.agent_id, analysis_task)

            if analysis_result.get("status") == "success":
                analysis = analysis_result["result"]["analysis"]
                if self._should_trade(analysis):
                    await self._execute_trade(symbol, analysis)
            else:
                print(f"Market analysis failed for {symbol}: {analysis_result}")

        except Exception as e:
            print(f"Error in analyze_and_trade for {symbol}: {e}")

    def _should_trade(self, analysis: Dict[str, Any]) -> bool:
        """Trade only on an explicit buy or sell recommendation."""
        recommendation = analysis.get("recommendation", "hold")
        return recommendation in ["buy", "sell"]

    async def _execute_trade(self, symbol: str, analysis: Dict[str, Any]) -> None:
        """Submit an order matching the analysis recommendation.

        REFACTOR: the buy and sell branches previously built identical
        payloads differing only in "side"; the order is now built once.
        """
        try:
            recommendation = analysis.get("recommendation", "hold")
            if recommendation not in ("buy", "sell"):
                return

            trade_task = {
                "type": "trading",
                "symbol": symbol,
                "side": recommendation,
                "amount": self.config.get("trade_amount", 0.1),
                "strategy": self.trading_strategy
            }

            trade_result = await self.bridge.execute_agent_task(self.agent_id, trade_task)

            if trade_result.get("status") == "success":
                print(f"Trade executed successfully: {trade_result}")
            else:
                print(f"Trade execution failed: {trade_result}")

        except Exception as e:
            print(f"Error executing trade: {e}")

    async def get_status(self) -> Dict[str, Any]:
        """Return the bridge's view of this agent's status."""
        return await self.bridge.get_agent_status(self.agent_id)
|
|
||||||
# Main execution
|
|
||||||
async def main():
    """Entry point: configure, start, and run the trading agent."""
    config = {
        "strategy": "basic",
        "symbols": ["AITBC/BTC"],
        "trade_interval": 30,
        "trade_amount": 0.1
    }
    agent = TradingAgent("trading-agent-001", config)

    # Guard clause: bail out early when startup fails.
    if not await agent.start():
        print("Failed to start trading agent")
        return
    try:
        await agent.run_trading_loop()
    except KeyboardInterrupt:
        print("Shutting down trading agent...")
    finally:
        # Always deregister, even on interrupt.
        await agent.stop()
|
|
||||||
if __name__ == "__main__":
    # Run the async entry point when executed as a script.
    asyncio.run(main())
@@ -1,229 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Integration Layer
|
|
||||||
Connects agent protocols to existing AITBC services
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import aiohttp
|
|
||||||
import json
|
|
||||||
from typing import Dict, Any, List, Optional
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
class AITBCServiceIntegration:
    """Async HTTP client for the AITBC service mesh.

    Use as an async context manager so the aiohttp session is opened and
    closed deterministically. All helpers return the decoded JSON body or
    an {"error": ..., "status": ...} marker on failure.
    """

    def __init__(self):
        self.service_endpoints = {
            "coordinator_api": "http://localhost:8000",
            "blockchain_rpc": "http://localhost:8006",
            "exchange_service": "http://localhost:8001",
            "marketplace": "http://localhost:8002",
            "agent_registry": "http://localhost:8013"
        }
        self.session = None  # aiohttp.ClientSession, created on __aenter__

    async def __aenter__(self):
        self.session = aiohttp.ClientSession()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        if self.session:
            await self.session.close()

    async def _get_json(self, url: str, error_status: str) -> Dict[str, Any]:
        """GET *url* and decode JSON; on any failure return an error marker."""
        try:
            async with self.session.get(url) as response:
                return await response.json()
        except Exception as e:
            return {"error": str(e), "status": error_status}

    async def get_blockchain_info(self) -> Dict[str, Any]:
        """Health snapshot from the blockchain RPC service."""
        return await self._get_json(
            f"{self.service_endpoints['blockchain_rpc']}/health", "unavailable"
        )

    async def get_exchange_status(self) -> Dict[str, Any]:
        """Health snapshot from the exchange service."""
        return await self._get_json(
            f"{self.service_endpoints['exchange_service']}/api/health", "unavailable"
        )

    async def get_coordinator_status(self) -> Dict[str, Any]:
        """Health snapshot from the coordinator API."""
        return await self._get_json(
            f"{self.service_endpoints['coordinator_api']}/health", "unavailable"
        )

    async def submit_transaction(self, transaction_data: Dict[str, Any]) -> Dict[str, Any]:
        """POST a transaction to the blockchain RPC."""
        try:
            async with self.session.post(
                f"{self.service_endpoints['blockchain_rpc']}/rpc/submit",
                json=transaction_data
            ) as response:
                return await response.json()
        except Exception as e:
            return {"error": str(e), "status": "failed"}

    async def get_market_data(self, symbol: str = "AITBC/BTC") -> Dict[str, Any]:
        """Fetch market data for *symbol* from the exchange."""
        return await self._get_json(
            f"{self.service_endpoints['exchange_service']}/api/market/{symbol}", "failed"
        )

    async def register_agent_with_coordinator(self, agent_data: Dict[str, Any]) -> Dict[str, Any]:
        """POST an agent registration to the agent registry."""
        try:
            async with self.session.post(
                f"{self.service_endpoints['agent_registry']}/api/agents/register",
                json=agent_data
            ) as response:
                return await response.json()
        except Exception as e:
            return {"error": str(e), "status": "failed"}
|
|
||||||
class AgentServiceBridge:
|
|
||||||
"""Bridge between agents and AITBC services"""
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.integration = AITBCServiceIntegration()
|
|
||||||
self.active_agents = {}
|
|
||||||
|
|
||||||
async def start_agent(self, agent_id: str, agent_config: Dict[str, Any]) -> bool:
|
|
||||||
"""Start an agent with service integration"""
|
|
||||||
try:
|
|
||||||
# Register agent with coordinator
|
|
||||||
async with self.integration as integration:
|
|
||||||
registration_result = await integration.register_agent_with_coordinator({
|
|
||||||
"name": agent_id,
|
|
||||||
"type": agent_config.get("type", "generic"),
|
|
||||||
"capabilities": agent_config.get("capabilities", []),
|
|
||||||
"chain_id": agent_config.get("chain_id", "ait-mainnet"),
|
|
||||||
"endpoint": agent_config.get("endpoint", f"http://localhost:{8000 + len(self.active_agents) + 10}")
|
|
||||||
})
|
|
||||||
|
|
||||||
# The registry returns the created agent dict on success, not a {"status": "ok"} wrapper
|
|
||||||
if registration_result and "id" in registration_result:
|
|
||||||
self.active_agents[agent_id] = {
|
|
||||||
"config": agent_config,
|
|
||||||
"registration": registration_result,
|
|
||||||
"started_at": datetime.utcnow()
|
|
||||||
}
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
print(f"Registration failed: {registration_result}")
|
|
||||||
return False
|
|
||||||
except Exception as e:
|
|
||||||
print(f"Failed to start agent {agent_id}: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
async def stop_agent(self, agent_id: str) -> bool:
|
|
||||||
"""Stop an agent"""
|
|
||||||
if agent_id in self.active_agents:
|
|
||||||
del self.active_agents[agent_id]
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
async def get_agent_status(self, agent_id: str) -> Dict[str, Any]:
|
|
||||||
"""Get agent status with service integration"""
|
|
||||||
if agent_id not in self.active_agents:
|
|
||||||
return {"status": "not_found"}
|
|
||||||
|
|
||||||
agent_info = self.active_agents[agent_id]
|
|
||||||
|
|
||||||
async with self.integration as integration:
|
|
||||||
# Get service statuses
|
|
||||||
blockchain_status = await integration.get_blockchain_info()
|
|
||||||
exchange_status = await integration.get_exchange_status()
|
|
||||||
coordinator_status = await integration.get_coordinator_status()
|
|
||||||
|
|
||||||
return {
|
|
||||||
"agent_id": agent_id,
|
|
||||||
"status": "active",
|
|
||||||
"started_at": agent_info["started_at"].isoformat(),
|
|
||||||
"services": {
|
|
||||||
"blockchain": blockchain_status,
|
|
||||||
"exchange": exchange_status,
|
|
||||||
"coordinator": coordinator_status
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async def execute_agent_task(self, agent_id: str, task_data: Dict[str, Any]) -> Dict[str, Any]:
|
|
||||||
"""Execute agent task with service integration"""
|
|
||||||
if agent_id not in self.active_agents:
|
|
||||||
return {"status": "error", "message": "Agent not found"}
|
|
||||||
|
|
||||||
task_type = task_data.get("type")
|
|
||||||
|
|
||||||
if task_type == "market_analysis":
|
|
||||||
return await self._execute_market_analysis(task_data)
|
|
||||||
elif task_type == "trading":
|
|
||||||
return await self._execute_trading_task(task_data)
|
|
||||||
elif task_type == "compliance_check":
|
|
||||||
return await self._execute_compliance_check(task_data)
|
|
||||||
else:
|
|
||||||
return {"status": "error", "message": f"Unknown task type: {task_type}"}
|
|
||||||
|
|
||||||
async def _execute_market_analysis(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
    """Fetch market data for the requested symbol and attach a stub analysis.

    The trend/volatility/recommendation values are fixed placeholders.
    """
    try:
        symbol = task_data.get("symbol", "AITBC/BTC")
        async with self.integration as integration:
            market_data = await integration.get_market_data(symbol)
            analysis_result = {
                "symbol": symbol,
                "market_data": market_data,
                "analysis": {
                    "trend": "neutral",
                    "volatility": "medium",
                    "recommendation": "hold",
                },
                "timestamp": datetime.utcnow().isoformat(),
            }
            return {"status": "success", "result": analysis_result}
    except Exception as e:
        # Task failures are reported in-band rather than raised.
        return {"status": "error", "message": str(e)}
|
|
||||||
|
|
||||||
async def _execute_trading_task(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
    """Build a trade order from the task payload and submit it via the integration layer."""
    try:
        symbol = task_data.get("symbol", "AITBC/BTC")
        async with self.integration as integration:
            # Live quote is fetched first so the order price can fall back to it.
            market_data = await integration.get_market_data(symbol)
            transaction = {
                "type": "trade",
                "symbol": symbol,
                "side": task_data.get("side", "buy"),
                "amount": task_data.get("amount", 0.1),
                "price": task_data.get("price", market_data.get("price", 0.001)),
            }
            tx_result = await integration.submit_transaction(transaction)
            return {"status": "success", "transaction": tx_result}
    except Exception as e:
        return {"status": "error", "message": str(e)}
|
|
||||||
|
|
||||||
async def _execute_compliance_check(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
|
|
||||||
"""Execute compliance check task"""
|
|
||||||
try:
|
|
||||||
# Basic compliance check
|
|
||||||
compliance_result = {
|
|
||||||
"user_id": task_data.get("user_id"),
|
|
||||||
"check_type": task_data.get("check_type", "basic"),
|
|
||||||
"status": "passed",
|
|
||||||
"checks_performed": ["kyc", "aml", "sanctions"],
|
|
||||||
"timestamp": datetime.utcnow().isoformat()
|
|
||||||
}
|
|
||||||
|
|
||||||
return {"status": "success", "result": compliance_result}
|
|
||||||
except Exception as e:
|
|
||||||
return {"status": "error", "message": str(e)}
|
|
||||||
@@ -1,149 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Compliance Agent
|
|
||||||
Automated compliance and regulatory monitoring agent
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
from typing import Dict, Any, List
|
|
||||||
from datetime import datetime
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
# Add parent directory to path
|
|
||||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
|
||||||
|
|
||||||
from apps.agent_services.agent_bridge.src.integration_layer import AgentServiceBridge
|
|
||||||
|
|
||||||
class ComplianceAgent:
    """Automated compliance agent.

    Periodically runs compliance checks for a configured list of entities via
    the AgentServiceBridge and prints an alert when a check fails.
    """

    def __init__(self, agent_id: str, config: Dict[str, Any]):
        self.agent_id = agent_id
        self.config = config
        self.bridge = AgentServiceBridge()
        # Controls run_compliance_loop(); flipped by start()/stop().
        self.is_running = False
        self.check_interval = config.get("check_interval", 300)  # 5 minutes
        self.monitored_entities = config.get("monitored_entities", [])

    async def start(self) -> bool:
        """Register this agent with the service bridge and mark it running."""
        try:
            success = await self.bridge.start_agent(self.agent_id, {
                "type": "compliance",
                "capabilities": ["kyc_check", "aml_screening", "regulatory_reporting"],
                "endpoint": f"http://localhost:8006"
            })

            if success:
                self.is_running = True
                print(f"Compliance agent {self.agent_id} started successfully")
                return True
            else:
                print(f"Failed to start compliance agent {self.agent_id}")
                return False
        except Exception as e:
            print(f"Error starting compliance agent: {e}")
            return False

    async def stop(self) -> bool:
        """Stop the loop and deregister from the bridge; return bridge result."""
        # Flip the flag first so run_compliance_loop() exits its while loop.
        self.is_running = False
        success = await self.bridge.stop_agent(self.agent_id)
        if success:
            print(f"Compliance agent {self.agent_id} stopped successfully")
        return success

    async def run_compliance_loop(self):
        """Main compliance monitoring loop; runs until is_running is cleared."""
        while self.is_running:
            try:
                for entity in self.monitored_entities:
                    await self._perform_compliance_check(entity)

                await asyncio.sleep(self.check_interval)
            except Exception as e:
                # Keep the loop alive on transient errors.
                print(f"Error in compliance loop: {e}")
                await asyncio.sleep(30)  # Wait before retrying

    async def _perform_compliance_check(self, entity_id: str) -> None:
        """Submit a full compliance-check task for one entity and route its result."""
        try:
            compliance_task = {
                "type": "compliance_check",
                "user_id": entity_id,
                "check_type": "full",
                "monitored_activities": ["trading", "transfers", "wallet_creation"]
            }

            result = await self.bridge.execute_agent_task(self.agent_id, compliance_task)

            if result.get("status") == "success":
                compliance_result = result["result"]
                await self._handle_compliance_result(entity_id, compliance_result)
            else:
                print(f"Compliance check failed for {entity_id}: {result}")

        except Exception as e:
            print(f"Error performing compliance check for {entity_id}: {e}")

    async def _handle_compliance_result(self, entity_id: str, result: Dict[str, Any]) -> None:
        """Log the check outcome; escalate to an alert when it failed."""
        status = result.get("status", "unknown")

        if status == "passed":
            print(f"✅ Compliance check passed for {entity_id}")
        elif status == "failed":
            print(f"❌ Compliance check failed for {entity_id}")
            # Trigger alert or further investigation
            await self._trigger_compliance_alert(entity_id, result)
        else:
            print(f"⚠️ Compliance check inconclusive for {entity_id}")

    async def _trigger_compliance_alert(self, entity_id: str, result: Dict[str, Any]) -> None:
        """Emit a high-severity alert for a failed compliance check."""
        alert_data = {
            "entity_id": entity_id,
            "alert_type": "compliance_failure",
            "severity": "high",
            "details": result,
            "timestamp": datetime.utcnow().isoformat()
        }

        # In a real implementation, this would send to alert system
        print(f"🚨 COMPLIANCE ALERT: {json.dumps(alert_data, indent=2)}")

    async def get_status(self) -> Dict[str, Any]:
        """Return bridge-reported status augmented with local loop configuration."""
        status = await self.bridge.get_agent_status(self.agent_id)
        status["monitored_entities"] = len(self.monitored_entities)
        status["check_interval"] = self.check_interval
        return status
|
|
||||||
|
|
||||||
# Main execution
|
|
||||||
async def main():
    """Entry point: configure, start, and supervise a single compliance agent."""
    config = {
        "check_interval": 60,  # 1 minute for testing
        "monitored_entities": ["user001", "user002", "user003"],
    }
    agent = ComplianceAgent("compliance-agent-001", config)

    # Guard clause: bail out early if registration with the bridge fails.
    if not await agent.start():
        print("Failed to start compliance agent")
        return

    try:
        await agent.run_compliance_loop()
    except KeyboardInterrupt:
        print("Shutting down compliance agent...")
    finally:
        await agent.stop()
|
|
||||||
|
|
||||||
# Run the compliance agent as a standalone process.
if __name__ == "__main__":
    asyncio.run(main())
|
|
||||||
@@ -1,132 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Coordinator Service
|
|
||||||
Agent task coordination and management
|
|
||||||
"""
|
|
||||||
|
|
||||||
from fastapi import FastAPI, HTTPException
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from typing import List, Optional, Dict, Any
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
import sqlite3
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from contextlib import asynccontextmanager
|
|
||||||
|
|
||||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan: create the SQLite schema before serving; no shutdown work."""
    init_db()
    yield
|
|
||||||
|
|
||||||
app = FastAPI(title="AITBC Agent Coordinator API", version="1.0.0", lifespan=lifespan)
|
|
||||||
|
|
||||||
# Database setup
|
|
||||||
def get_db():
    """Open a connection to the coordinator database with name-based row access."""
    connection = sqlite3.connect('agent_coordinator.db')
    connection.row_factory = sqlite3.Row
    return connection
|
|
||||||
|
|
||||||
@contextmanager
def get_db_connection():
    """Yield a database connection that is always closed on exit."""
    connection = get_db()
    try:
        yield connection
    finally:
        connection.close()
|
|
||||||
|
|
||||||
# Initialize database
|
|
||||||
def init_db():
    """Create the tasks table on first run (idempotent via IF NOT EXISTS)."""
    with get_db_connection() as conn:
        # payload and required_capabilities are stored JSON-encoded.
        conn.execute('''
            CREATE TABLE IF NOT EXISTS tasks (
                id TEXT PRIMARY KEY,
                task_type TEXT NOT NULL,
                payload TEXT NOT NULL,
                required_capabilities TEXT NOT NULL,
                priority TEXT NOT NULL,
                status TEXT NOT NULL,
                assigned_agent_id TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                result TEXT
            )
        ''')
|
|
||||||
|
|
||||||
# Models
|
|
||||||
class Task(BaseModel):
    """API representation of a coordinator task."""
    id: str
    task_type: str
    payload: Dict[str, Any]
    required_capabilities: List[str]
    priority: str
    status: str
    # Populated once a worker agent is assigned; None while unassigned.
    assigned_agent_id: Optional[str] = None
|
|
||||||
|
|
||||||
class TaskCreation(BaseModel):
    """Request body for creating a task; id and status are assigned server-side."""
    task_type: str
    payload: Dict[str, Any]
    required_capabilities: List[str]
    priority: str = "normal"
|
|
||||||
|
|
||||||
# API Endpoints
|
|
||||||
|
|
||||||
@app.post("/api/tasks", response_model=Task)
async def create_task(task: TaskCreation):
    """Create a new task.

    Persists the task with a fresh UUID and "pending" status, then echoes
    the stored record back to the caller.
    """
    task_id = str(uuid.uuid4())

    with get_db_connection() as conn:
        conn.execute('''
            INSERT INTO tasks (id, task_type, payload, required_capabilities, priority, status)
            VALUES (?, ?, ?, ?, ?, ?)
        ''', (
            task_id, task.task_type, json.dumps(task.payload),
            json.dumps(task.required_capabilities), task.priority, "pending"
        ))
        # BUG FIX: sqlite3 rolls back uncommitted changes when the connection
        # is closed, so without this commit every created task was silently
        # lost. (The registry service's register endpoint already commits.)
        conn.commit()

    return Task(
        id=task_id,
        task_type=task.task_type,
        payload=task.payload,
        required_capabilities=task.required_capabilities,
        priority=task.priority,
        status="pending"
    )
|
|
||||||
|
|
||||||
@app.get("/api/tasks", response_model=List[Task])
async def list_tasks(status: Optional[str] = None):
    """List tasks, optionally restricted to a single status value."""
    query = "SELECT * FROM tasks"
    params = []
    if status:
        query += " WHERE status = ?"
        params.append(status)

    with get_db_connection() as conn:
        rows = conn.execute(query, params).fetchall()

    def _to_model(row):
        # Rehydrate the JSON-encoded columns into Python structures.
        return Task(
            id=row["id"],
            task_type=row["task_type"],
            payload=json.loads(row["payload"]),
            required_capabilities=json.loads(row["required_capabilities"]),
            priority=row["priority"],
            status=row["status"],
            assigned_agent_id=row["assigned_agent_id"],
        )

    return [_to_model(row) for row in rows]
|
|
||||||
|
|
||||||
@app.get("/api/health")
async def health_check():
    """Liveness probe: report service status and current server time."""
    now = datetime.utcnow()
    return {"status": "ok", "timestamp": now}
|
|
||||||
|
|
||||||
# Run the coordinator service standalone on port 8012.
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8012)
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
# AITBC Agent Protocols Environment Configuration
|
|
||||||
# Copy this file to .env and update with your secure values
|
|
||||||
|
|
||||||
# Agent Protocol Encryption Key (generate a strong, unique key)
|
|
||||||
AITBC_AGENT_PROTOCOL_KEY=your-secure-encryption-key-here
|
|
||||||
|
|
||||||
# Agent Protocol Salt (generate a unique salt value)
|
|
||||||
AITBC_AGENT_PROTOCOL_SALT=your-unique-salt-value-here
|
|
||||||
|
|
||||||
# Agent Registry Configuration
|
|
||||||
AGENT_REGISTRY_HOST=0.0.0.0
|
|
||||||
AGENT_REGISTRY_PORT=8003
|
|
||||||
|
|
||||||
# Database Configuration
|
|
||||||
AGENT_REGISTRY_DB_PATH=agent_registry.db
|
|
||||||
|
|
||||||
# Security Settings
|
|
||||||
AGENT_PROTOCOL_TIMEOUT=300
|
|
||||||
AGENT_PROTOCOL_MAX_RETRIES=3
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
"""
|
|
||||||
Agent Protocols Package
|
|
||||||
"""
|
|
||||||
|
|
||||||
from .message_protocol import MessageProtocol, MessageTypes, AgentMessageClient
|
|
||||||
from .task_manager import TaskManager, TaskStatus, TaskPriority, Task
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"MessageProtocol",
|
|
||||||
"MessageTypes",
|
|
||||||
"AgentMessageClient",
|
|
||||||
"TaskManager",
|
|
||||||
"TaskStatus",
|
|
||||||
"TaskPriority",
|
|
||||||
"Task"
|
|
||||||
]
|
|
||||||
@@ -1,113 +0,0 @@
|
|||||||
"""
|
|
||||||
Message Protocol for AITBC Agents
|
|
||||||
Handles message creation, routing, and delivery between agents
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Dict, Any, Optional, List
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
class MessageTypes(Enum):
    """Message type enumeration"""
    TASK_REQUEST = "task_request"    # ask another agent to perform work
    TASK_RESPONSE = "task_response"  # reply carrying a task result
    HEARTBEAT = "heartbeat"          # liveness signal
    STATUS_UPDATE = "status_update"  # progress / state-change notice
    ERROR = "error"                  # failure report
    DATA = "data"                    # free-form data payload
|
|
||||||
|
|
||||||
class MessageProtocol:
    """In-memory message bus for agent-to-agent communication.

    Messages are plain dicts kept in a single list; status transitions
    (pending -> sent -> received) are recorded in place on the envelope.
    """

    def __init__(self):
        self.messages = []          # every envelope ever created, in order
        self.message_handlers = {}  # written but never read here; reserved for handlers

    def create_message(
        self,
        sender_id: str,
        receiver_id: str,
        message_type: MessageTypes,
        content: Dict[str, Any],
        message_id: Optional[str] = None
    ) -> Dict[str, Any]:
        """Build, record, and return a new message envelope."""
        if message_id is None:
            message_id = str(uuid.uuid4())

        envelope = {
            "message_id": message_id,
            "sender_id": sender_id,
            "receiver_id": receiver_id,
            "message_type": message_type.value,
            "content": content,
            "timestamp": datetime.utcnow().isoformat(),
            "status": "pending",
        }
        self.messages.append(envelope)
        return envelope

    def send_message(self, message: Dict[str, Any]) -> bool:
        """Mark a message as sent; report False (status 'failed') on error."""
        try:
            message["status"] = "sent"
            message["sent_timestamp"] = datetime.utcnow().isoformat()
            return True
        except Exception:
            message["status"] = "failed"
            return False

    def receive_message(self, message_id: str) -> Optional[Dict[str, Any]]:
        """Mark the matching message received and return it; None if unknown."""
        match = next(
            (m for m in self.messages if m["message_id"] == message_id), None
        )
        if match is not None:
            match["status"] = "received"
            match["received_timestamp"] = datetime.utcnow().isoformat()
        return match

    def get_messages_by_agent(self, agent_id: str) -> List[Dict[str, Any]]:
        """All messages in which the agent appears as sender or receiver."""
        return [
            m for m in self.messages
            if agent_id in (m["sender_id"], m["receiver_id"])
        ]
|
|
||||||
|
|
||||||
class AgentMessageClient:
    """Convenience wrapper binding one agent id to a MessageProtocol instance."""

    def __init__(self, agent_id: str, protocol: MessageProtocol):
        self.agent_id = agent_id
        self.protocol = protocol
        self.received_messages = []  # envelopes already delivered to this client

    def send_message(
        self,
        receiver_id: str,
        message_type: MessageTypes,
        content: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Create and dispatch a message from this agent; return the envelope."""
        envelope = self.protocol.create_message(
            sender_id=self.agent_id,
            receiver_id=receiver_id,
            message_type=message_type,
            content=content
        )
        self.protocol.send_message(envelope)
        return envelope

    def receive_messages(self) -> List[Dict[str, Any]]:
        """Deliver every not-yet-seen 'sent' message addressed to this agent."""
        fresh = []
        for envelope in self.protocol.messages:
            addressed_here = envelope["receiver_id"] == self.agent_id
            deliverable = addressed_here and envelope["status"] == "sent"
            if deliverable and envelope not in self.received_messages:
                # Flip the envelope to 'received' and remember we saw it.
                self.protocol.receive_message(envelope["message_id"])
                self.received_messages.append(envelope)
                fresh.append(envelope)
        return fresh
|
|
||||||
@@ -1,128 +0,0 @@
|
|||||||
"""
|
|
||||||
Task Manager for AITBC Agents
|
|
||||||
Handles task creation, assignment, and tracking
|
|
||||||
"""
|
|
||||||
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
from typing import Dict, Any, Optional, List
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
class TaskStatus(Enum):
    """Task status enumeration"""
    PENDING = "pending"          # created, not yet started
    IN_PROGRESS = "in_progress"  # currently being worked on
    COMPLETED = "completed"      # finished successfully (result recorded)
    FAILED = "failed"            # finished with an error (error recorded)
    CANCELLED = "cancelled"      # aborted before completion
|
|
||||||
|
|
||||||
class TaskPriority(Enum):
    """Task priority enumeration, lowest to highest."""
    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    URGENT = "urgent"
|
|
||||||
|
|
||||||
class Task:
    """A single unit of work assigned to an agent, with lifecycle bookkeeping."""

    def __init__(
        self,
        task_id: str,
        title: str,
        description: str,
        assigned_to: str,
        priority: TaskPriority = TaskPriority.MEDIUM,
        created_by: Optional[str] = None
    ):
        self.task_id = task_id
        self.title = title
        self.description = description
        self.assigned_to = assigned_to
        self.priority = priority
        # The assignee is treated as the creator unless one is given.
        self.created_by = created_by or assigned_to
        self.status = TaskStatus.PENDING
        self.created_at = datetime.utcnow()
        self.updated_at = datetime.utcnow()
        self.completed_at = None  # set when status becomes COMPLETED
        self.result = None        # handler output on success
        self.error = None         # failure description on FAILED
|
|
||||||
|
|
||||||
class TaskManager:
    """Task manager for agent coordination: create, track, and query tasks."""

    def __init__(self):
        self.tasks = {}         # task_id -> Task
        self.task_history = []  # written nowhere here; reserved for archival

    def create_task(
        self,
        title: str,
        description: str,
        assigned_to: str,
        priority: TaskPriority = TaskPriority.MEDIUM,
        created_by: Optional[str] = None
    ) -> Task:
        """Create, register, and return a new task with a generated id."""
        new_task = Task(
            task_id=str(uuid.uuid4()),
            title=title,
            description=description,
            assigned_to=assigned_to,
            priority=priority,
            created_by=created_by
        )
        self.tasks[new_task.task_id] = new_task
        return new_task

    def get_task(self, task_id: str) -> Optional[Task]:
        """Look up a task by id; None if unknown."""
        return self.tasks.get(task_id)

    def update_task_status(
        self,
        task_id: str,
        status: TaskStatus,
        result: Optional[Dict[str, Any]] = None,
        error: Optional[str] = None
    ) -> bool:
        """Transition a task's status; returns False for unknown ids."""
        task = self.tasks.get(task_id)
        if task is None:
            return False

        task.status = status
        task.updated_at = datetime.utcnow()

        if status == TaskStatus.COMPLETED:
            # Record completion time together with the handler output.
            task.completed_at = datetime.utcnow()
            task.result = result
        elif status == TaskStatus.FAILED:
            task.error = error
        return True

    def get_tasks_by_agent(self, agent_id: str) -> List[Task]:
        """All tasks currently assigned to the given agent."""
        return [t for t in self.tasks.values() if t.assigned_to == agent_id]

    def get_tasks_by_status(self, status: TaskStatus) -> List[Task]:
        """All tasks currently in the given status."""
        return [t for t in self.tasks.values() if t.status == status]

    def get_overdue_tasks(self, hours: int = 24) -> List[Task]:
        """Unfinished tasks created more than `hours` ago."""
        cutoff = datetime.utcnow() - timedelta(hours=hours)
        open_states = (TaskStatus.PENDING, TaskStatus.IN_PROGRESS)
        return [
            t for t in self.tasks.values()
            if t.status in open_states and t.created_at < cutoff
        ]
|
|
||||||
@@ -1,151 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Registry Service
|
|
||||||
Central agent discovery and registration system
|
|
||||||
"""
|
|
||||||
|
|
||||||
from fastapi import FastAPI, HTTPException, Depends
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from typing import List, Optional, Dict, Any
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
import sqlite3
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from contextlib import asynccontextmanager
|
|
||||||
|
|
||||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan: build the registry schema on startup; nothing to tear down."""
    init_db()
    yield
|
|
||||||
|
|
||||||
app = FastAPI(title="AITBC Agent Registry API", version="1.0.0", lifespan=lifespan)
|
|
||||||
|
|
||||||
# Database setup
|
|
||||||
def get_db():
    """Open a connection to the registry database with name-based row access."""
    connection = sqlite3.connect('agent_registry.db')
    connection.row_factory = sqlite3.Row
    return connection
|
|
||||||
|
|
||||||
@contextmanager
def get_db_connection():
    """Yield a registry database connection that is always closed on exit."""
    connection = get_db()
    try:
        yield connection
    finally:
        connection.close()
|
|
||||||
|
|
||||||
# Initialize database
|
|
||||||
def init_db():
    """Create the agents table on first run (idempotent via IF NOT EXISTS)."""
    with get_db_connection() as conn:
        # capabilities and metadata are stored JSON-encoded.
        conn.execute('''
            CREATE TABLE IF NOT EXISTS agents (
                id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                type TEXT NOT NULL,
                capabilities TEXT NOT NULL,
                chain_id TEXT NOT NULL,
                endpoint TEXT NOT NULL,
                status TEXT DEFAULT 'active',
                last_heartbeat TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                metadata TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''')
|
|
||||||
|
|
||||||
# Models
|
|
||||||
class Agent(BaseModel):
    """API representation of a registered agent."""
    id: str
    name: str
    type: str
    capabilities: List[str]
    chain_id: str
    endpoint: str
    # NOTE(review): mutable default — safe under pydantic, which copies field
    # defaults per instance, but would be a bug on a plain class.
    metadata: Optional[Dict[str, Any]] = {}
|
|
||||||
|
|
||||||
class AgentRegistration(BaseModel):
|
|
||||||
name: str
|
|
||||||
type: str
|
|
||||||
capabilities: List[str]
|
|
||||||
chain_id: str
|
|
||||||
endpoint: str
|
|
||||||
metadata: Optional[Dict[str, Any]] = {}
|
|
||||||
|
|
||||||
# API Endpoints
|
|
||||||
|
|
||||||
@app.post("/api/agents/register", response_model=Agent)
async def register_agent(agent: AgentRegistration):
    """Persist a new agent under a fresh UUID and echo the stored record."""
    agent_id = str(uuid.uuid4())
    # List-valued / dict-valued fields are JSON-encoded for storage.
    row = (
        agent_id, agent.name, agent.type,
        json.dumps(agent.capabilities), agent.chain_id,
        agent.endpoint, json.dumps(agent.metadata)
    )

    with get_db_connection() as conn:
        conn.execute('''
            INSERT INTO agents (id, name, type, capabilities, chain_id, endpoint, metadata)
            VALUES (?, ?, ?, ?, ?, ?, ?)
        ''', row)
        conn.commit()

    return Agent(
        id=agent_id,
        name=agent.name,
        type=agent.type,
        capabilities=agent.capabilities,
        chain_id=agent.chain_id,
        endpoint=agent.endpoint,
        metadata=agent.metadata
    )
|
|
||||||
|
|
||||||
@app.get("/api/agents", response_model=List[Agent])
async def list_agents(
    agent_type: Optional[str] = None,
    chain_id: Optional[str] = None,
    capability: Optional[str] = None
):
    """List active agents, optionally filtered by type, chain, or capability."""
    # Build the WHERE clause incrementally from whichever filters were given.
    clauses = ["status = 'active'"]
    params = []
    if agent_type:
        clauses.append("type = ?")
        params.append(agent_type)
    if chain_id:
        clauses.append("chain_id = ?")
        params.append(chain_id)
    if capability:
        # capabilities is a JSON array string, so a substring match works.
        clauses.append("capabilities LIKE ?")
        params.append(f'%{capability}%')
    query = "SELECT * FROM agents WHERE " + " AND ".join(clauses)

    with get_db_connection() as conn:
        rows = conn.execute(query, params).fetchall()

    return [
        Agent(
            id=row["id"],
            name=row["name"],
            type=row["type"],
            capabilities=json.loads(row["capabilities"]),
            chain_id=row["chain_id"],
            endpoint=row["endpoint"],
            metadata=json.loads(row["metadata"] or "{}")
        )
        for row in rows
    ]
|
|
||||||
|
|
||||||
@app.get("/api/health")
async def health_check():
    """Liveness probe: report service status and current server time."""
    now = datetime.utcnow()
    return {"status": "ok", "timestamp": now}
|
|
||||||
|
|
||||||
# Run the registry service standalone on port 8013.
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8013)
|
|
||||||
@@ -1,431 +0,0 @@
|
|||||||
"""
|
|
||||||
Agent Registration System
|
|
||||||
Handles AI agent registration, capability management, and discovery
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import time
|
|
||||||
import json
|
|
||||||
import hashlib
|
|
||||||
from typing import Dict, List, Optional, Set, Tuple
|
|
||||||
from dataclasses import dataclass, asdict
|
|
||||||
from enum import Enum
|
|
||||||
from decimal import Decimal
|
|
||||||
|
|
||||||
class AgentType(Enum):
    """Roles an agent can play in the network."""
    AI_MODEL = "ai_model"
    DATA_PROVIDER = "data_provider"
    VALIDATOR = "validator"
    MARKET_MAKER = "market_maker"
    BROKER = "broker"
    ORACLE = "oracle"
|
|
||||||
|
|
||||||
class AgentStatus(Enum):
    """Lifecycle states of a registered agent."""
    REGISTERED = "registered"  # accepted by the registry; initial state
    ACTIVE = "active"
    INACTIVE = "inactive"
    SUSPENDED = "suspended"    # temporarily barred
    BANNED = "banned"          # permanently barred
|
|
||||||
|
|
||||||
class CapabilityType(Enum):
    """Kinds of work an agent can advertise."""
    TEXT_GENERATION = "text_generation"
    IMAGE_GENERATION = "image_generation"
    DATA_ANALYSIS = "data_analysis"
    PREDICTION = "prediction"
    VALIDATION = "validation"
    COMPUTATION = "computation"
|
|
||||||
|
|
||||||
@dataclass
class AgentCapability:
    """One advertised capability of an agent, with its commercial terms."""
    capability_type: CapabilityType
    name: str
    version: str
    parameters: Dict           # capability-specific configuration
    performance_metrics: Dict  # metrics as reported for this capability
    cost_per_use: Decimal      # price charged per invocation
    availability: float        # presumably a 0..1 uptime fraction — TODO confirm
    max_concurrent_jobs: int
|
|
||||||
|
|
||||||
@dataclass
class AgentInfo:
    """Registry record for a single registered agent."""
    agent_id: str              # 16-hex-char id produced by _generate_agent_id
    agent_type: AgentType
    name: str
    owner_address: str         # 0x-prefixed 42-character address (see validation)
    public_key: str
    endpoint_url: str          # http(s) URL the agent serves on
    capabilities: List[AgentCapability]
    reputation_score: float    # starts at 1.0 on registration
    total_jobs_completed: int
    total_earnings: Decimal
    registration_time: float   # time.time() at registration
    last_active: float         # time.time() of last observed activity
    status: AgentStatus
    metadata: Dict
|
|
||||||
|
|
||||||
class AgentRegistry:
|
|
||||||
"""Manages AI agent registration and discovery"""
|
|
||||||
|
|
||||||
def __init__(self):
    """Initialize the empty registry with per-type and per-capability indexes."""
    self.agents: Dict[str, AgentInfo] = {}
    # Secondary indexes: one id-set per capability and per agent type.
    self.capability_index: Dict[CapabilityType, Set[str]] = {
        capability: set() for capability in CapabilityType
    }
    self.type_index: Dict[AgentType, Set[str]] = {
        agent_type: set() for agent_type in AgentType
    }
    self.reputation_scores: Dict[str, float] = {}
    self.registration_queue: List[Dict] = []

    # Registry parameters
    self.min_reputation_threshold = 0.5
    self.max_agents_per_type = 1000
    self.registration_fee = Decimal('100.0')
    self.inactivity_threshold = 86400 * 7  # 7 days
|
|
||||||
|
|
||||||
async def register_agent(self, agent_type: AgentType, name: str, owner_address: str,
                         public_key: str, endpoint_url: str, capabilities: List[Dict],
                         metadata: Dict = None) -> Tuple[bool, str, Optional[str]]:
    """Register a new AI agent.

    Returns (success, message, agent_id or None). The agent starts with a
    neutral reputation of 1.0 and REGISTERED status.
    """
    try:
        # Validate inputs
        if not self._validate_registration_inputs(agent_type, name, owner_address, public_key, endpoint_url):
            return False, "Invalid registration inputs", None

        # Check if agent already exists
        agent_id = self._generate_agent_id(owner_address, name)
        if agent_id in self.agents:
            return False, "Agent already registered", None

        # Check type limits
        if len(self.type_index[agent_type]) >= self.max_agents_per_type:
            return False, f"Maximum agents of type {agent_type.value} reached", None

        # Convert capability payloads; invalid entries are skipped.
        agent_capabilities = []
        for cap_data in capabilities:
            capability = self._create_capability_from_data(cap_data)
            if capability:
                agent_capabilities.append(capability)

        if not agent_capabilities:
            return False, "Agent must have at least one valid capability", None

        # Create agent info
        agent_info = AgentInfo(
            agent_id=agent_id,
            agent_type=agent_type,
            name=name,
            owner_address=owner_address,
            public_key=public_key,
            endpoint_url=endpoint_url,
            capabilities=agent_capabilities,
            reputation_score=1.0,  # Start with neutral reputation
            total_jobs_completed=0,
            total_earnings=Decimal('0'),
            registration_time=time.time(),
            last_active=time.time(),
            status=AgentStatus.REGISTERED,
            metadata=metadata or {}
        )

        # Add to registry and update indexes
        self.agents[agent_id] = agent_info
        self.type_index[agent_type].add(agent_id)
        for capability in agent_capabilities:
            self.capability_index[capability.capability_type].add(agent_id)

        # BUG FIX: this called log_info(), which is neither imported nor
        # defined before this point in the module; the resulting NameError
        # was swallowed by the except below, so registrations reported
        # failure even though the registry had already been mutated.
        # print() matches the logging style used throughout these services.
        print(f"Agent registered: {agent_id} ({name})")
        return True, "Registration successful", agent_id

    except Exception as e:
        # NOTE(review): failures after the "Add to registry" step leave the
        # indexes partially updated; callers only see the error string.
        return False, f"Registration failed: {str(e)}", None
|
|
||||||
|
|
||||||
def _validate_registration_inputs(self, agent_type: AgentType, name: str,
                                  owner_address: str, public_key: str, endpoint_url: str) -> bool:
    """Return True when every registration field passes basic sanity checks.

    Checks, in order: all fields are present/truthy, the owner address
    looks like a 0x-prefixed 42-character address, the endpoint is an
    HTTP(S) URL, and the display name length is within [3, 100].
    """
    fields = (agent_type, name, owner_address, public_key, endpoint_url)
    if not all(fields):
        return False

    # Simplified EVM-style address check: '0x' prefix + 40 characters.
    address_ok = owner_address.startswith('0x') and len(owner_address) == 42
    # Simplified URL check: scheme only, no full parse.
    url_ok = endpoint_url.startswith(('http://', 'https://'))
    name_ok = 3 <= len(name) <= 100

    return address_ok and url_ok and name_ok
|
|
||||||
|
|
||||||
def _generate_agent_id(self, owner_address: str, name: str) -> str:
|
|
||||||
"""Generate unique agent ID"""
|
|
||||||
content = f"{owner_address}:{name}:{time.time()}"
|
|
||||||
return hashlib.sha256(content.encode()).hexdigest()[:16]
|
|
||||||
|
|
||||||
def _create_capability_from_data(self, cap_data: Dict) -> Optional[AgentCapability]:
    """Create capability from data dictionary.

    Builds an AgentCapability from a caller-supplied dict.  Returns None
    (rather than raising) on any malformed input so callers can simply
    skip invalid entries.
    """
    try:
        # Validate required fields
        required_fields = ['type', 'name', 'version', 'cost_per_use']
        if not all(field in cap_data for field in required_fields):
            return None

        # Parse capability type
        try:
            capability_type = CapabilityType(cap_data['type'])
        except ValueError:
            # Unknown capability type string — treat as invalid input.
            return None

        # Create capability
        return AgentCapability(
            capability_type=capability_type,
            name=cap_data['name'],
            version=cap_data['version'],
            parameters=cap_data.get('parameters', {}),
            performance_metrics=cap_data.get('performance_metrics', {}),
            # Decimal(str(...)) avoids binary-float artifacts when the
            # caller passes a float cost.
            cost_per_use=Decimal(str(cap_data['cost_per_use'])),
            availability=cap_data.get('availability', 1.0),
            max_concurrent_jobs=cap_data.get('max_concurrent_jobs', 1)
        )

    except Exception as e:
        # Catch-all: a malformed dict must never break registration.
        log_error(f"Error creating capability: {e}")
        return None
|
|
||||||
|
|
||||||
async def update_agent_status(self, agent_id: str, status: AgentStatus) -> Tuple[bool, str]:
    """Transition an agent to *status* and refresh its activity timestamp.

    Returns:
        (True, message) on success; (False, message) when the agent id
        is not registered.
    """
    agent = self.agents.get(agent_id)
    if agent is None:
        return False, "Agent not found"

    previous = agent.status
    agent.status = status
    agent.last_active = time.time()

    log_info(f"Agent {agent_id} status changed: {previous.value} -> {status.value}")
    return True, "Status updated successfully"
|
|
||||||
|
|
||||||
async def update_agent_capabilities(self, agent_id: str, capabilities: List[Dict]) -> Tuple[bool, str]:
    """Replace an agent's capability set atomically.

    The new capability list is parsed and validated *before* any index
    mutation, so a rejected update leaves the registry untouched.  (The
    previous implementation removed the old capabilities from
    capability_index before validating the new ones, which left the
    index inconsistent with agent.capabilities whenever validation
    failed.)

    Returns:
        (True, message) on success; (False, message) when the agent is
        unknown or no valid capability could be parsed.
    """
    if agent_id not in self.agents:
        return False, "Agent not found"

    agent = self.agents[agent_id]

    # Parse/validate first; invalid entries are skipped because
    # _create_capability_from_data returns None for them.
    new_capabilities = []
    for cap_data in capabilities:
        capability = self._create_capability_from_data(cap_data)
        if capability:
            new_capabilities.append(capability)

    if not new_capabilities:
        return False, "No valid capabilities provided"

    # Commit: swap index entries and the agent's capability list.
    for old_capability in agent.capabilities:
        self.capability_index[old_capability.capability_type].discard(agent_id)
    for capability in new_capabilities:
        self.capability_index[capability.capability_type].add(agent_id)

    agent.capabilities = new_capabilities
    agent.last_active = time.time()

    return True, "Capabilities updated successfully"
|
|
||||||
|
|
||||||
async def find_agents_by_capability(self, capability_type: CapabilityType,
                                    filters: Dict = None) -> List[AgentInfo]:
    """Return ACTIVE agents offering *capability_type*, best reputation first.

    Optional *filters* are applied via _matches_filters (reputation,
    cost, availability, location).
    """
    candidate_ids = self.capability_index.get(capability_type, set())

    matches = [
        agent
        for agent in (self.agents.get(candidate_id) for candidate_id in candidate_ids)
        if agent
        and agent.status == AgentStatus.ACTIVE
        and self._matches_filters(agent, filters)
    ]

    # Highest reputation first.
    matches.sort(key=lambda a: a.reputation_score, reverse=True)
    return matches
|
|
||||||
|
|
||||||
async def find_agents_by_type(self, agent_type: AgentType, filters: Dict = None) -> List[AgentInfo]:
    """Return ACTIVE agents of *agent_type*, sorted by reputation (desc).

    Optional *filters* are applied via _matches_filters.
    """
    results = []
    for candidate_id in self.type_index.get(agent_type, set()):
        candidate = self.agents.get(candidate_id)
        if candidate is None or candidate.status != AgentStatus.ACTIVE:
            continue
        if not self._matches_filters(candidate, filters):
            continue
        results.append(candidate)

    # Highest reputation first.
    return sorted(results, key=lambda a: a.reputation_score, reverse=True)
|
|
||||||
|
|
||||||
def _matches_filters(self, agent: AgentInfo, filters: Dict) -> bool:
    """Check if agent matches filters.

    Supported filter keys (all optional):
      - 'min_reputation': minimum reputation_score.
      - 'max_cost_per_use': every capability must cost at most this.
      - 'min_availability': every capability must be at least this available.
      - 'location': must equal agent.metadata['location'].

    An empty or None *filters* matches every agent.
    """
    if not filters:
        return True

    # Reputation filter
    if 'min_reputation' in filters:
        if agent.reputation_score < filters['min_reputation']:
            return False

    # Cost filter: reject if ANY capability exceeds the cap.
    if 'max_cost_per_use' in filters:
        max_cost = Decimal(str(filters['max_cost_per_use']))
        if any(cap.cost_per_use > max_cost for cap in agent.capabilities):
            return False

    # Availability filter: reject if ANY capability is below the floor.
    if 'min_availability' in filters:
        min_availability = filters['min_availability']
        if any(cap.availability < min_availability for cap in agent.capabilities):
            return False

    # Location filter (if implemented)
    if 'location' in filters:
        agent_location = agent.metadata.get('location')
        if agent_location != filters['location']:
            return False

    return True
|
|
||||||
|
|
||||||
async def get_agent_info(self, agent_id: str) -> Optional[AgentInfo]:
    """Get agent information.

    Returns:
        The AgentInfo for *agent_id*, or None when not registered.
    """
    return self.agents.get(agent_id)
|
|
||||||
|
|
||||||
async def search_agents(self, query: str, limit: int = 50) -> List[AgentInfo]:
    """Case-insensitive search over agent names and capability names/types.

    Only ACTIVE agents are considered.  Results are ordered by
    reputation (descending) and truncated to *limit*.
    """
    needle = query.lower()

    def is_hit(agent):
        # Name match, or any capability whose name or type value
        # contains the query.
        if needle in agent.name.lower():
            return True
        return any(
            needle in cap.name.lower() or needle in cap.capability_type.value
            for cap in agent.capabilities
        )

    hits = [
        agent for agent in self.agents.values()
        if agent.status == AgentStatus.ACTIVE and is_hit(agent)
    ]

    # Sort by relevance proxy (reputation), best first.
    hits.sort(key=lambda a: a.reputation_score, reverse=True)
    return hits[:limit]
|
|
||||||
|
|
||||||
async def get_agent_statistics(self, agent_id: str) -> Optional[Dict]:
    """Get detailed statistics for an agent.

    Returns None for unknown agents; otherwise a JSON-serializable dict
    (Decimals converted to float) including derived metrics such as
    average earnings per job and jobs per day.
    """
    agent = self.agents.get(agent_id)
    if not agent:
        return None

    # Calculate additional statistics; guard divisions for brand-new
    # agents (no completed jobs, ~zero days active).
    avg_job_earnings = agent.total_earnings / agent.total_jobs_completed if agent.total_jobs_completed > 0 else Decimal('0')
    days_active = (time.time() - agent.registration_time) / 86400
    jobs_per_day = agent.total_jobs_completed / days_active if days_active > 0 else 0

    return {
        'agent_id': agent_id,
        'name': agent.name,
        'type': agent.agent_type.value,
        'status': agent.status.value,
        'reputation_score': agent.reputation_score,
        'total_jobs_completed': agent.total_jobs_completed,
        'total_earnings': float(agent.total_earnings),
        'avg_job_earnings': float(avg_job_earnings),
        'jobs_per_day': jobs_per_day,
        'days_active': int(days_active),
        'capabilities_count': len(agent.capabilities),
        'last_active': agent.last_active,
        'registration_time': agent.registration_time
    }
|
|
||||||
|
|
||||||
async def get_registry_statistics(self) -> Dict:
    """Aggregate registry-wide metrics: counts, reputation, earnings."""
    all_agents = list(self.agents.values())
    total_agents = len(all_agents)
    active_agents = sum(1 for a in all_agents if a.status == AgentStatus.ACTIVE)

    # Per-type and per-capability populations, driven by the indexes.
    type_counts = {t.value: len(self.type_index[t]) for t in AgentType}
    capability_counts = {c.value: len(self.capability_index[c]) for c in CapabilityType}

    # Mean reputation across all agents (0 for an empty registry).
    reputations = [a.reputation_score for a in all_agents]
    avg_reputation = sum(reputations) / len(reputations) if reputations else 0

    total_earnings = sum(a.total_earnings for a in all_agents)

    return {
        'total_agents': total_agents,
        'active_agents': active_agents,
        'inactive_agents': total_agents - active_agents,
        'agent_types': type_counts,
        'capabilities': capability_counts,
        'average_reputation': avg_reputation,
        'total_earnings': float(total_earnings),
        'registration_fee': float(self.registration_fee)
    }
|
|
||||||
|
|
||||||
async def cleanup_inactive_agents(self) -> Tuple[int, str]:
    """Clean up inactive agents.

    Permanently removes agents that are both INACTIVE and idle for
    longer than self.inactivity_threshold, keeping the type and
    capability indexes in sync.

    Returns:
        (count, human-readable summary).
    """
    current_time = time.time()
    cleaned_count = 0

    # Iterate over a snapshot: we delete from self.agents inside the loop.
    for agent_id, agent in list(self.agents.items()):
        if (agent.status == AgentStatus.INACTIVE and
            current_time - agent.last_active > self.inactivity_threshold):

            # Remove from registry
            del self.agents[agent_id]

            # Update indexes (discard is a no-op if already absent)
            self.type_index[agent.agent_type].discard(agent_id)
            for capability in agent.capabilities:
                self.capability_index[capability.capability_type].discard(agent_id)

            cleaned_count += 1

    if cleaned_count > 0:
        log_info(f"Cleaned up {cleaned_count} inactive agents")

    return cleaned_count, f"Cleaned up {cleaned_count} inactive agents"
|
|
||||||
|
|
||||||
# Global agent registry
# Module-level singleton; populated by create_agent_registry().
agent_registry: Optional[AgentRegistry] = None

def get_agent_registry() -> Optional[AgentRegistry]:
    """Get global agent registry (None until create_agent_registry() runs)."""
    return agent_registry
|
|
||||||
|
|
||||||
def create_agent_registry() -> AgentRegistry:
    """Create and set global agent registry.

    Replaces any existing global instance and returns the new one.
    """
    global agent_registry
    agent_registry = AgentRegistry()
    return agent_registry
|
|
||||||
@@ -1,166 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Trading Agent
|
|
||||||
Automated trading agent for AITBC marketplace
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
from typing import Dict, Any, List
|
|
||||||
from datetime import datetime
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
# Add parent directory to path
|
|
||||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
|
||||||
|
|
||||||
from apps.agent_services.agent_bridge.src.integration_layer import AgentServiceBridge
|
|
||||||
|
|
||||||
class TradingAgent:
    """Automated trading agent.

    Periodically runs market analysis for each configured symbol through
    the AgentServiceBridge and submits buy/sell orders when the analysis
    recommends one.  Lifecycle: start() -> run_trading_loop() -> stop().
    """

    def __init__(self, agent_id: str, config: Dict[str, Any]):
        self.agent_id = agent_id
        self.config = config
        # Bridge to the AITBC services (registration, task execution).
        self.bridge = AgentServiceBridge()
        self.is_running = False
        self.trading_strategy = config.get("strategy", "basic")
        self.symbols = config.get("symbols", ["AITBC/BTC"])
        self.trade_interval = config.get("trade_interval", 60)  # seconds

    async def start(self) -> bool:
        """Register with the service bridge and mark the agent running.

        Returns True on successful registration; failures are printed,
        never raised.
        """
        try:
            # Register with service bridge
            success = await self.bridge.start_agent(self.agent_id, {
                "type": "trading",
                "capabilities": ["market_analysis", "trading", "risk_management"],
                "endpoint": f"http://localhost:8005"
            })

            if success:
                self.is_running = True
                print(f"Trading agent {self.agent_id} started successfully")
                return True
            else:
                print(f"Failed to start trading agent {self.agent_id}")
                return False
        except Exception as e:
            print(f"Error starting trading agent: {e}")
            return False

    async def stop(self) -> bool:
        """Stop the trading loop and deregister from the bridge."""
        # Clear the flag first so run_trading_loop() exits its while loop.
        self.is_running = False
        success = await self.bridge.stop_agent(self.agent_id)
        if success:
            print(f"Trading agent {self.agent_id} stopped successfully")
        return success

    async def run_trading_loop(self):
        """Main trading loop: analyze/trade every symbol, then sleep.

        Runs until is_running is cleared; any per-iteration error is
        printed and retried after a short back-off.
        """
        while self.is_running:
            try:
                for symbol in self.symbols:
                    await self._analyze_and_trade(symbol)

                await asyncio.sleep(self.trade_interval)
            except Exception as e:
                print(f"Error in trading loop: {e}")
                await asyncio.sleep(10)  # Wait before retrying

    async def _analyze_and_trade(self, symbol: str) -> None:
        """Analyze market and execute trades for a single symbol."""
        try:
            # Perform market analysis
            analysis_task = {
                "type": "market_analysis",
                "symbol": symbol,
                "strategy": self.trading_strategy
            }

            analysis_result = await self.bridge.execute_agent_task(self.agent_id, analysis_task)

            if analysis_result.get("status") == "success":
                analysis = analysis_result["result"]["analysis"]

                # Make trading decision
                if self._should_trade(analysis):
                    await self._execute_trade(symbol, analysis)
            else:
                print(f"Market analysis failed for {symbol}: {analysis_result}")

        except Exception as e:
            print(f"Error in analyze_and_trade for {symbol}: {e}")

    def _should_trade(self, analysis: Dict[str, Any]) -> bool:
        """Trade only on an explicit 'buy'/'sell' recommendation."""
        recommendation = analysis.get("recommendation", "hold")
        return recommendation in ["buy", "sell"]

    async def _execute_trade(self, symbol: str, analysis: Dict[str, Any]) -> None:
        """Execute trade based on analysis ('buy'/'sell'; anything else is a no-op)."""
        try:
            recommendation = analysis.get("recommendation", "hold")

            if recommendation == "buy":
                trade_task = {
                    "type": "trading",
                    "symbol": symbol,
                    "side": "buy",
                    "amount": self.config.get("trade_amount", 0.1),
                    "strategy": self.trading_strategy
                }
            elif recommendation == "sell":
                trade_task = {
                    "type": "trading",
                    "symbol": symbol,
                    "side": "sell",
                    "amount": self.config.get("trade_amount", 0.1),
                    "strategy": self.trading_strategy
                }
            else:
                # Defensive: _should_trade() should have filtered this out.
                return

            trade_result = await self.bridge.execute_agent_task(self.agent_id, trade_task)

            if trade_result.get("status") == "success":
                print(f"Trade executed successfully: {trade_result}")
            else:
                print(f"Trade execution failed: {trade_result}")

        except Exception as e:
            print(f"Error executing trade: {e}")

    async def get_status(self) -> Dict[str, Any]:
        """Get agent status from the service bridge."""
        return await self.bridge.get_agent_status(self.agent_id)
|
|
||||||
|
|
||||||
# Main execution
async def main():
    """Main trading agent execution: start, loop until Ctrl-C, stop."""
    agent_id = "trading-agent-001"
    config = {
        "strategy": "basic",
        "symbols": ["AITBC/BTC"],
        "trade_interval": 30,
        "trade_amount": 0.1
    }

    agent = TradingAgent(agent_id, config)

    # Start agent
    if await agent.start():
        try:
            # Run trading loop
            await agent.run_trading_loop()
        except KeyboardInterrupt:
            print("Shutting down trading agent...")
        finally:
            # Always deregister, even on error/interrupt.
            await agent.stop()
    else:
        print("Failed to start trading agent")

if __name__ == "__main__":
    asyncio.run(main())
|
|
||||||
@@ -1,229 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Integration Layer
|
|
||||||
Connects agent protocols to existing AITBC services
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import aiohttp
|
|
||||||
import json
|
|
||||||
from typing import Dict, Any, List, Optional
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
class AITBCServiceIntegration:
    """Integration layer for AITBC services.

    Thin async HTTP client over the local AITBC service mesh.  Use as an
    async context manager so the aiohttp session is opened and closed
    cleanly.  Every call returns the decoded JSON body, or an
    {"error", "status"} dict on failure — callers never see exceptions.
    """

    def __init__(self):
        # Default local endpoints for each backing service.
        self.service_endpoints = {
            "coordinator_api": "http://localhost:8000",
            "blockchain_rpc": "http://localhost:8006",
            "exchange_service": "http://localhost:8001",
            "marketplace": "http://localhost:8002",
            "agent_registry": "http://localhost:8013"
        }
        self.session = None

    async def __aenter__(self):
        self.session = aiohttp.ClientSession()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        if self.session:
            await self.session.close()

    async def _get_json(self, url: str, failure_status: str) -> Dict[str, Any]:
        """GET *url*, decode JSON; map any error to a status dict."""
        try:
            async with self.session.get(url) as response:
                return await response.json()
        except Exception as e:
            return {"error": str(e), "status": failure_status}

    async def _post_json(self, url: str, payload: Dict[str, Any]) -> Dict[str, Any]:
        """POST *payload* as JSON to *url*; map any error to a status dict."""
        try:
            async with self.session.post(url, json=payload) as response:
                return await response.json()
        except Exception as e:
            return {"error": str(e), "status": "failed"}

    async def get_blockchain_info(self) -> Dict[str, Any]:
        """Get blockchain information."""
        return await self._get_json(
            f"{self.service_endpoints['blockchain_rpc']}/health", "unavailable")

    async def get_exchange_status(self) -> Dict[str, Any]:
        """Get exchange service status."""
        return await self._get_json(
            f"{self.service_endpoints['exchange_service']}/api/health", "unavailable")

    async def get_coordinator_status(self) -> Dict[str, Any]:
        """Get coordinator API status."""
        return await self._get_json(
            f"{self.service_endpoints['coordinator_api']}/health", "unavailable")

    async def submit_transaction(self, transaction_data: Dict[str, Any]) -> Dict[str, Any]:
        """Submit transaction to blockchain."""
        return await self._post_json(
            f"{self.service_endpoints['blockchain_rpc']}/rpc/submit", transaction_data)

    async def get_market_data(self, symbol: str = "AITBC/BTC") -> Dict[str, Any]:
        """Get market data from exchange."""
        return await self._get_json(
            f"{self.service_endpoints['exchange_service']}/api/market/{symbol}", "failed")

    async def register_agent_with_coordinator(self, agent_data: Dict[str, Any]) -> Dict[str, Any]:
        """Register agent with coordinator."""
        return await self._post_json(
            f"{self.service_endpoints['agent_registry']}/api/agents/register", agent_data)
|
|
||||||
|
|
||||||
class AgentServiceBridge:
    """Bridge between agents and AITBC services.

    Tracks locally-started agents in memory and dispatches their tasks
    (market analysis, trading, compliance checks) to the backing
    services through AITBCServiceIntegration.
    """

    def __init__(self):
        self.integration = AITBCServiceIntegration()
        # agent_id -> {"config", "registration", "started_at"}
        self.active_agents = {}

    async def start_agent(self, agent_id: str, agent_config: Dict[str, Any]) -> bool:
        """Start an agent with service integration.

        Registers the agent with the coordinator/registry and records it
        in active_agents.  Returns True on success; failures are printed
        and reported as False, never raised.
        """
        try:
            # Register agent with coordinator
            async with self.integration as integration:
                registration_result = await integration.register_agent_with_coordinator({
                    "name": agent_id,
                    "type": agent_config.get("type", "generic"),
                    "capabilities": agent_config.get("capabilities", []),
                    "chain_id": agent_config.get("chain_id", "ait-mainnet"),
                    # Default port derived from the active-agent count —
                    # presumably to avoid collisions; TODO confirm this
                    # scheme matches the ports agents actually listen on.
                    "endpoint": agent_config.get("endpoint", f"http://localhost:{8000 + len(self.active_agents) + 10}")
                })

            # The registry returns the created agent dict on success, not a {"status": "ok"} wrapper
            if registration_result and "id" in registration_result:
                self.active_agents[agent_id] = {
                    "config": agent_config,
                    "registration": registration_result,
                    "started_at": datetime.utcnow()
                }
                return True
            else:
                print(f"Registration failed: {registration_result}")
                return False
        except Exception as e:
            print(f"Failed to start agent {agent_id}: {e}")
            return False

    async def stop_agent(self, agent_id: str) -> bool:
        """Stop an agent (forget it locally).  Returns False if unknown."""
        if agent_id in self.active_agents:
            del self.active_agents[agent_id]
            return True
        return False

    async def get_agent_status(self, agent_id: str) -> Dict[str, Any]:
        """Get agent status plus the health of the backing services."""
        if agent_id not in self.active_agents:
            return {"status": "not_found"}

        agent_info = self.active_agents[agent_id]

        async with self.integration as integration:
            # Get service statuses
            blockchain_status = await integration.get_blockchain_info()
            exchange_status = await integration.get_exchange_status()
            coordinator_status = await integration.get_coordinator_status()

        return {
            "agent_id": agent_id,
            "status": "active",
            "started_at": agent_info["started_at"].isoformat(),
            "services": {
                "blockchain": blockchain_status,
                "exchange": exchange_status,
                "coordinator": coordinator_status
            }
        }

    async def execute_agent_task(self, agent_id: str, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Execute agent task with service integration.

        Dispatches on task_data["type"]; unknown task types and unknown
        agents yield an error dict rather than an exception.
        """
        if agent_id not in self.active_agents:
            return {"status": "error", "message": "Agent not found"}

        task_type = task_data.get("type")

        if task_type == "market_analysis":
            return await self._execute_market_analysis(task_data)
        elif task_type == "trading":
            return await self._execute_trading_task(task_data)
        elif task_type == "compliance_check":
            return await self._execute_compliance_check(task_data)
        else:
            return {"status": "error", "message": f"Unknown task type: {task_type}"}

    async def _execute_market_analysis(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Execute market analysis task.

        NOTE(review): the analysis block itself is a fixed placeholder
        (neutral/medium/hold); only market_data comes from the exchange.
        """
        try:
            async with self.integration as integration:
                market_data = await integration.get_market_data(task_data.get("symbol", "AITBC/BTC"))

            # Perform basic analysis
            analysis_result = {
                "symbol": task_data.get("symbol", "AITBC/BTC"),
                "market_data": market_data,
                "analysis": {
                    "trend": "neutral",
                    "volatility": "medium",
                    "recommendation": "hold"
                },
                "timestamp": datetime.utcnow().isoformat()
            }

            return {"status": "success", "result": analysis_result}
        except Exception as e:
            return {"status": "error", "message": str(e)}

    async def _execute_trading_task(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Execute trading task: build a trade transaction and submit it."""
        try:
            # Get market data first
            async with self.integration as integration:
                market_data = await integration.get_market_data(task_data.get("symbol", "AITBC/BTC"))

                # Create transaction
                transaction = {
                    "type": "trade",
                    "symbol": task_data.get("symbol", "AITBC/BTC"),
                    "side": task_data.get("side", "buy"),
                    "amount": task_data.get("amount", 0.1),
                    # Fall back to live market price, then a fixed default.
                    "price": task_data.get("price", market_data.get("price", 0.001))
                }

                # Submit transaction
                tx_result = await integration.submit_transaction(transaction)

            return {"status": "success", "transaction": tx_result}
        except Exception as e:
            return {"status": "error", "message": str(e)}

    async def _execute_compliance_check(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Execute compliance check task.

        NOTE(review): placeholder — always reports "passed" with a fixed
        list of checks; no external service is consulted.
        """
        try:
            # Basic compliance check
            compliance_result = {
                "user_id": task_data.get("user_id"),
                "check_type": task_data.get("check_type", "basic"),
                "status": "passed",
                "checks_performed": ["kyc", "aml", "sanctions"],
                "timestamp": datetime.utcnow().isoformat()
            }

            return {"status": "success", "result": compliance_result}
        except Exception as e:
            return {"status": "error", "message": str(e)}
|
|
||||||
@@ -1,149 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Compliance Agent
|
|
||||||
Automated compliance and regulatory monitoring agent
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
from typing import Dict, Any, List
|
|
||||||
from datetime import datetime
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
# Add parent directory to path
|
|
||||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
|
||||||
|
|
||||||
from apps.agent_services.agent_bridge.src.integration_layer import AgentServiceBridge
|
|
||||||
|
|
||||||
class ComplianceAgent:
|
|
||||||
"""Automated compliance agent"""
|
|
||||||
|
|
||||||
def __init__(self, agent_id: str, config: Dict[str, Any]):
|
|
||||||
self.agent_id = agent_id
|
|
||||||
self.config = config
|
|
||||||
self.bridge = AgentServiceBridge()
|
|
||||||
self.is_running = False
|
|
||||||
self.check_interval = config.get("check_interval", 300) # 5 minutes
|
|
||||||
self.monitored_entities = config.get("monitored_entities", [])
|
|
||||||
|
|
||||||
async def start(self) -> bool:
|
|
||||||
"""Start compliance agent"""
|
|
||||||
try:
|
|
||||||
success = await self.bridge.start_agent(self.agent_id, {
|
|
||||||
"type": "compliance",
|
|
||||||
"capabilities": ["kyc_check", "aml_screening", "regulatory_reporting"],
|
|
||||||
"endpoint": f"http://localhost:8006"
|
|
||||||
})
|
|
||||||
|
|
||||||
if success:
|
|
||||||
self.is_running = True
|
|
||||||
print(f"Compliance agent {self.agent_id} started successfully")
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
print(f"Failed to start compliance agent {self.agent_id}")
|
|
||||||
return False
|
|
||||||
except Exception as e:
|
|
||||||
print(f"Error starting compliance agent: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
async def stop(self) -> bool:
|
|
||||||
"""Stop compliance agent"""
|
|
||||||
self.is_running = False
|
|
||||||
success = await self.bridge.stop_agent(self.agent_id)
|
|
||||||
if success:
|
|
||||||
print(f"Compliance agent {self.agent_id} stopped successfully")
|
|
||||||
return success
|
|
||||||
|
|
||||||
async def run_compliance_loop(self):
|
|
||||||
"""Main compliance monitoring loop"""
|
|
||||||
while self.is_running:
|
|
||||||
try:
|
|
||||||
for entity in self.monitored_entities:
|
|
||||||
await self._perform_compliance_check(entity)
|
|
||||||
|
|
||||||
await asyncio.sleep(self.check_interval)
|
|
||||||
except Exception as e:
|
|
||||||
print(f"Error in compliance loop: {e}")
|
|
||||||
await asyncio.sleep(30) # Wait before retrying
|
|
||||||
|
|
||||||
async def _perform_compliance_check(self, entity_id: str) -> None:
|
|
||||||
"""Perform compliance check for entity"""
|
|
||||||
try:
|
|
||||||
compliance_task = {
|
|
||||||
"type": "compliance_check",
|
|
||||||
"user_id": entity_id,
|
|
||||||
"check_type": "full",
|
|
||||||
"monitored_activities": ["trading", "transfers", "wallet_creation"]
|
|
||||||
}
|
|
||||||
|
|
||||||
result = await self.bridge.execute_agent_task(self.agent_id, compliance_task)
|
|
||||||
|
|
||||||
if result.get("status") == "success":
|
|
||||||
compliance_result = result["result"]
|
|
||||||
await self._handle_compliance_result(entity_id, compliance_result)
|
|
||||||
else:
|
|
||||||
print(f"Compliance check failed for {entity_id}: {result}")
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
print(f"Error performing compliance check for {entity_id}: {e}")
|
|
||||||
|
|
||||||
async def _handle_compliance_result(self, entity_id: str, result: Dict[str, Any]) -> None:
|
|
||||||
"""Handle compliance check result"""
|
|
||||||
status = result.get("status", "unknown")
|
|
||||||
|
|
||||||
if status == "passed":
|
|
||||||
print(f"✅ Compliance check passed for {entity_id}")
|
|
||||||
elif status == "failed":
|
|
||||||
print(f"❌ Compliance check failed for {entity_id}")
|
|
||||||
# Trigger alert or further investigation
|
|
||||||
await self._trigger_compliance_alert(entity_id, result)
|
|
||||||
else:
|
|
||||||
print(f"⚠️ Compliance check inconclusive for {entity_id}")
|
|
||||||
|
|
||||||
async def _trigger_compliance_alert(self, entity_id: str, result: Dict[str, Any]) -> None:
|
|
||||||
"""Trigger compliance alert"""
|
|
||||||
alert_data = {
|
|
||||||
"entity_id": entity_id,
|
|
||||||
"alert_type": "compliance_failure",
|
|
||||||
"severity": "high",
|
|
||||||
"details": result,
|
|
||||||
"timestamp": datetime.utcnow().isoformat()
|
|
||||||
}
|
|
||||||
|
|
||||||
# In a real implementation, this would send to alert system
|
|
||||||
print(f"🚨 COMPLIANCE ALERT: {json.dumps(alert_data, indent=2)}")
|
|
||||||
|
|
||||||
async def get_status(self) -> Dict[str, Any]:
    """Return bridge-reported agent status augmented with local monitoring info."""
    status = await self.bridge.get_agent_status(self.agent_id)
    status.update(
        monitored_entities=len(self.monitored_entities),
        check_interval=self.check_interval,
    )
    return status
# Main execution
|
|
||||||
async def main():
    """Entry point: build and run the compliance agent until interrupted."""
    config = {
        "check_interval": 60,  # 1 minute for testing
        "monitored_entities": ["user001", "user002", "user003"],
    }
    agent = ComplianceAgent("compliance-agent-001", config)

    # Guard clause: bail out early when startup fails.
    if not await agent.start():
        print("Failed to start compliance agent")
        return

    try:
        await agent.run_compliance_loop()
    except KeyboardInterrupt:
        print("Shutting down compliance agent...")
    finally:
        await agent.stop()
if __name__ == "__main__":
|
|
||||||
asyncio.run(main())
|
|
||||||
@@ -1,132 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Coordinator Service
|
|
||||||
Agent task coordination and management
|
|
||||||
"""
|
|
||||||
|
|
||||||
from fastapi import FastAPI, HTTPException
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from typing import List, Optional, Dict, Any
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
import sqlite3
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from contextlib import asynccontextmanager
|
|
||||||
|
|
||||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan: create the database schema before serving."""
    init_db()  # startup: ensure tables exist
    yield
    # Shutdown: no teardown needed for the SQLite-backed store.
|
|
||||||
app = FastAPI(title="AITBC Agent Coordinator API", version="1.0.0", lifespan=lifespan)
|
|
||||||
|
|
||||||
# Database setup
|
|
||||||
def get_db():
    """Open a connection to the coordinator's SQLite store.

    Rows are returned as :class:`sqlite3.Row` so columns can be accessed
    by name. The caller is responsible for closing the connection.
    """
    connection = sqlite3.connect('agent_coordinator.db')
    connection.row_factory = sqlite3.Row
    return connection
|
|
||||||
@contextmanager
def get_db_connection():
    """Context manager yielding a DB connection that is always closed."""
    connection = get_db()
    try:
        yield connection
    finally:
        connection.close()
|
|
||||||
# Initialize database
|
|
||||||
def init_db():
    """Create the ``tasks`` table if it does not already exist."""
    schema = '''
            CREATE TABLE IF NOT EXISTS tasks (
                id TEXT PRIMARY KEY,
                task_type TEXT NOT NULL,
                payload TEXT NOT NULL,
                required_capabilities TEXT NOT NULL,
                priority TEXT NOT NULL,
                status TEXT NOT NULL,
                assigned_agent_id TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                result TEXT
            )
        '''
    # DDL auto-commits under sqlite3's default transaction handling.
    with get_db_connection() as conn:
        conn.execute(schema)
|
|
||||||
# Models
|
|
||||||
class Task(BaseModel):
    """A coordinator task as stored in SQLite and returned by the API."""

    id: str
    task_type: str
    payload: Dict[str, Any]               # arbitrary JSON task payload
    required_capabilities: List[str]      # capabilities an agent must offer
    priority: str
    status: str
    assigned_agent_id: Optional[str] = None  # unset until a task is assigned
|
|
||||||
class TaskCreation(BaseModel):
    """Request body for creating a task; id and status are server-assigned."""

    task_type: str
    payload: Dict[str, Any]
    required_capabilities: List[str]
    priority: str = "normal"
|
|
||||||
# API Endpoints
|
|
||||||
|
|
||||||
@app.post("/api/tasks", response_model=Task)
async def create_task(task: TaskCreation):
    """Create a new task and persist it in the ``pending`` state.

    Args:
        task: Creation payload; ``payload`` and ``required_capabilities``
            are stored JSON-encoded.

    Returns:
        The stored task, including its generated UUID.
    """
    task_id = str(uuid.uuid4())

    with get_db_connection() as conn:
        conn.execute('''
            INSERT INTO tasks (id, task_type, payload, required_capabilities, priority, status)
            VALUES (?, ?, ?, ?, ?, ?)
        ''', (
            task_id, task.task_type, json.dumps(task.payload),
            json.dumps(task.required_capabilities), task.priority, "pending"
        ))
        # BUG FIX: commit the INSERT. sqlite3 opens an implicit transaction
        # for DML, so without a commit the row is rolled back when
        # get_db_connection() closes the connection and every created task
        # was silently lost. (The sibling registry service's register_agent
        # endpoint already commits; this makes the two consistent.)
        conn.commit()

    return Task(
        id=task_id,
        task_type=task.task_type,
        payload=task.payload,
        required_capabilities=task.required_capabilities,
        priority=task.priority,
        status="pending"
    )
|
|
||||||
@app.get("/api/tasks", response_model=List[Task])
async def list_tasks(status: Optional[str] = None):
    """List stored tasks, optionally filtered by status."""
    query = "SELECT * FROM tasks"
    params = []
    if status:
        query += " WHERE status = ?"
        params.append(status)

    with get_db_connection() as conn:
        rows = conn.execute(query, params).fetchall()

    # JSON columns are decoded back into Python structures here.
    return [
        Task(
            id=row["id"],
            task_type=row["task_type"],
            payload=json.loads(row["payload"]),
            required_capabilities=json.loads(row["required_capabilities"]),
            priority=row["priority"],
            status=row["status"],
            assigned_agent_id=row["assigned_agent_id"],
        )
        for row in rows
    ]
|
|
||||||
@app.get("/api/health")
async def health_check():
    """Liveness probe: report service health and the current UTC time."""
    return {"status": "ok", "timestamp": datetime.utcnow()}
|
|
||||||
if __name__ == "__main__":
|
|
||||||
import uvicorn
|
|
||||||
uvicorn.run(app, host="0.0.0.0", port=8012)
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
# AITBC Agent Protocols Environment Configuration
|
|
||||||
# Copy this file to .env and update with your secure values
|
|
||||||
|
|
||||||
# Agent Protocol Encryption Key (generate a strong, unique key)
|
|
||||||
AITBC_AGENT_PROTOCOL_KEY=your-secure-encryption-key-here
|
|
||||||
|
|
||||||
# Agent Protocol Salt (generate a unique salt value)
|
|
||||||
AITBC_AGENT_PROTOCOL_SALT=your-unique-salt-value-here
|
|
||||||
|
|
||||||
# Agent Registry Configuration
|
|
||||||
AGENT_REGISTRY_HOST=0.0.0.0
|
|
||||||
AGENT_REGISTRY_PORT=8003
|
|
||||||
|
|
||||||
# Database Configuration
|
|
||||||
AGENT_REGISTRY_DB_PATH=agent_registry.db
|
|
||||||
|
|
||||||
# Security Settings
|
|
||||||
AGENT_PROTOCOL_TIMEOUT=300
|
|
||||||
AGENT_PROTOCOL_MAX_RETRIES=3
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
"""
|
|
||||||
Agent Protocols Package
|
|
||||||
"""
|
|
||||||
|
|
||||||
from .message_protocol import MessageProtocol, MessageTypes, AgentMessageClient
|
|
||||||
from .task_manager import TaskManager, TaskStatus, TaskPriority, Task
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"MessageProtocol",
|
|
||||||
"MessageTypes",
|
|
||||||
"AgentMessageClient",
|
|
||||||
"TaskManager",
|
|
||||||
"TaskStatus",
|
|
||||||
"TaskPriority",
|
|
||||||
"Task"
|
|
||||||
]
|
|
||||||
@@ -1,113 +0,0 @@
|
|||||||
"""
|
|
||||||
Message Protocol for AITBC Agents
|
|
||||||
Handles message creation, routing, and delivery between agents
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Dict, Any, Optional, List
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
class MessageTypes(Enum):
    """Kinds of messages exchanged between agents."""

    TASK_REQUEST = "task_request"
    TASK_RESPONSE = "task_response"
    HEARTBEAT = "heartbeat"
    STATUS_UPDATE = "status_update"
    ERROR = "error"
    DATA = "data"
|
|
||||||
class MessageProtocol:
    """In-memory message store and lifecycle tracker for agent traffic.

    Messages move through ``pending`` -> ``sent`` -> ``received`` (or
    ``failed``); every message ever created is retained in ``messages``.
    """

    def __init__(self):
        self.messages = []          # every envelope, in creation order
        self.message_handlers = {}  # reserved for per-type handlers

    def create_message(
        self,
        sender_id: str,
        receiver_id: str,
        message_type: MessageTypes,
        content: Dict[str, Any],
        message_id: Optional[str] = None
    ) -> Dict[str, Any]:
        """Build, record, and return a new ``pending`` message envelope."""
        envelope = {
            "message_id": str(uuid.uuid4()) if message_id is None else message_id,
            "sender_id": sender_id,
            "receiver_id": receiver_id,
            "message_type": message_type.value,
            "content": content,
            "timestamp": datetime.utcnow().isoformat(),
            "status": "pending",
        }
        self.messages.append(envelope)
        return envelope

    def send_message(self, message: Dict[str, Any]) -> bool:
        """Mark *message* as sent; on any error mark it failed instead."""
        try:
            message["status"] = "sent"
            message["sent_timestamp"] = datetime.utcnow().isoformat()
        except Exception:
            message["status"] = "failed"
            return False
        return True

    def receive_message(self, message_id: str) -> Optional[Dict[str, Any]]:
        """Mark the matching message received and return it, else None."""
        match = next(
            (m for m in self.messages if m["message_id"] == message_id),
            None,
        )
        if match is not None:
            match["status"] = "received"
            match["received_timestamp"] = datetime.utcnow().isoformat()
        return match

    def get_messages_by_agent(self, agent_id: str) -> List[Dict[str, Any]]:
        """All messages where *agent_id* is the sender or the receiver."""
        return [
            m for m in self.messages
            if agent_id in (m["sender_id"], m["receiver_id"])
        ]
|
|
||||||
class AgentMessageClient:
    """Convenience wrapper binding one agent id to a MessageProtocol."""

    def __init__(self, agent_id: str, protocol: MessageProtocol):
        self.agent_id = agent_id
        self.protocol = protocol
        self.received_messages = []  # envelopes already delivered to this client

    def send_message(
        self,
        receiver_id: str,
        message_type: MessageTypes,
        content: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Create and dispatch a message from this agent; return the envelope."""
        envelope = self.protocol.create_message(
            sender_id=self.agent_id,
            receiver_id=receiver_id,
            message_type=message_type,
            content=content,
        )
        self.protocol.send_message(envelope)
        return envelope

    def receive_messages(self) -> List[Dict[str, Any]]:
        """Deliver every newly *sent* message addressed to this agent."""
        delivered = []
        for envelope in self.protocol.messages:
            # Skip messages not for us, not yet sent, or already delivered.
            if (envelope["receiver_id"] != self.agent_id
                    or envelope["status"] != "sent"
                    or envelope in self.received_messages):
                continue
            self.protocol.receive_message(envelope["message_id"])
            self.received_messages.append(envelope)
            delivered.append(envelope)
        return delivered
@@ -1,128 +0,0 @@
|
|||||||
"""
|
|
||||||
Task Manager for AITBC Agents
|
|
||||||
Handles task creation, assignment, and tracking
|
|
||||||
"""
|
|
||||||
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
from typing import Dict, Any, Optional, List
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
class TaskStatus(Enum):
    """Lifecycle states of a task."""

    PENDING = "pending"
    IN_PROGRESS = "in_progress"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"
|
|
||||||
class TaskPriority(Enum):
    """Relative urgency of a task."""

    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    URGENT = "urgent"
|
|
||||||
class Task:
    """A unit of work assigned to an agent, with lifecycle bookkeeping."""

    def __init__(
        self,
        task_id: str,
        title: str,
        description: str,
        assigned_to: str,
        priority: TaskPriority = TaskPriority.MEDIUM,
        created_by: Optional[str] = None
    ):
        self.task_id = task_id
        self.title = title
        self.description = description
        self.assigned_to = assigned_to
        self.priority = priority
        # Creator defaults to the assignee when not given.
        self.created_by = created_by or assigned_to
        self.status = TaskStatus.PENDING
        self.created_at = datetime.utcnow()
        self.updated_at = datetime.utcnow()
        # Filled in by TaskManager.update_task_status on completion/failure.
        self.completed_at = None
        self.result = None
        self.error = None
|
|
||||||
class TaskManager:
    """Creates, tracks, and queries tasks for agent coordination."""

    def __init__(self):
        self.tasks = {}         # task_id -> Task
        self.task_history = []  # reserved for archived tasks

    def create_task(
        self,
        title: str,
        description: str,
        assigned_to: str,
        priority: TaskPriority = TaskPriority.MEDIUM,
        created_by: Optional[str] = None
    ) -> Task:
        """Create, register, and return a new task with a fresh UUID."""
        new_task = Task(
            task_id=str(uuid.uuid4()),
            title=title,
            description=description,
            assigned_to=assigned_to,
            priority=priority,
            created_by=created_by,
        )
        self.tasks[new_task.task_id] = new_task
        return new_task

    def get_task(self, task_id: str) -> Optional[Task]:
        """Look up a task by id, or None when unknown."""
        return self.tasks.get(task_id)

    def update_task_status(
        self,
        task_id: str,
        status: TaskStatus,
        result: Optional[Dict[str, Any]] = None,
        error: Optional[str] = None
    ) -> bool:
        """Transition a task to *status*; False for unknown task ids.

        COMPLETED additionally records *result* and the completion time;
        FAILED records *error*; other statuses only bump ``updated_at``.
        """
        task = self.get_task(task_id)
        if task is None:
            return False

        task.status = status
        task.updated_at = datetime.utcnow()
        if status is TaskStatus.COMPLETED:
            task.completed_at = datetime.utcnow()
            task.result = result
        elif status is TaskStatus.FAILED:
            task.error = error
        return True

    def get_tasks_by_agent(self, agent_id: str) -> List[Task]:
        """All tasks assigned to *agent_id*."""
        return [t for t in self.tasks.values() if t.assigned_to == agent_id]

    def get_tasks_by_status(self, status: TaskStatus) -> List[Task]:
        """All tasks currently in *status*."""
        return [t for t in self.tasks.values() if t.status == status]

    def get_overdue_tasks(self, hours: int = 24) -> List[Task]:
        """Open (pending/in-progress) tasks created more than *hours* ago."""
        cutoff = datetime.utcnow() - timedelta(hours=hours)
        open_states = (TaskStatus.PENDING, TaskStatus.IN_PROGRESS)
        return [
            t for t in self.tasks.values()
            if t.status in open_states and t.created_at < cutoff
        ]
@@ -1,151 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Registry Service
|
|
||||||
Central agent discovery and registration system
|
|
||||||
"""
|
|
||||||
|
|
||||||
from fastapi import FastAPI, HTTPException, Depends
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from typing import List, Optional, Dict, Any
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
import sqlite3
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from contextlib import asynccontextmanager
|
|
||||||
|
|
||||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan: create the database schema before serving."""
    init_db()  # startup: ensure tables exist
    yield
    # Shutdown: no teardown needed for the SQLite-backed store.
|
|
||||||
app = FastAPI(title="AITBC Agent Registry API", version="1.0.0", lifespan=lifespan)
|
|
||||||
|
|
||||||
# Database setup
|
|
||||||
def get_db():
    """Open a connection to the agent registry's SQLite store.

    Rows are returned as :class:`sqlite3.Row` so columns can be accessed
    by name. The caller is responsible for closing the connection.
    """
    connection = sqlite3.connect('agent_registry.db')
    connection.row_factory = sqlite3.Row
    return connection
|
|
||||||
@contextmanager
def get_db_connection():
    """Context manager yielding a DB connection that is always closed."""
    connection = get_db()
    try:
        yield connection
    finally:
        connection.close()
|
|
||||||
# Initialize database
|
|
||||||
def init_db():
    """Create the ``agents`` table if it does not already exist."""
    schema = '''
            CREATE TABLE IF NOT EXISTS agents (
                id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                type TEXT NOT NULL,
                capabilities TEXT NOT NULL,
                chain_id TEXT NOT NULL,
                endpoint TEXT NOT NULL,
                status TEXT DEFAULT 'active',
                last_heartbeat TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                metadata TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        '''
    # DDL auto-commits under sqlite3's default transaction handling.
    with get_db_connection() as conn:
        conn.execute(schema)
|
|
||||||
# Models
|
|
||||||
class Agent(BaseModel):
    """A registered agent as stored in SQLite and returned by the API."""

    id: str
    name: str
    type: str                 # free-form agent type label
    capabilities: List[str]   # capability names this agent advertises
    chain_id: str
    endpoint: str
    metadata: Optional[Dict[str, Any]] = {}
|
|
||||||
class AgentRegistration(BaseModel):
    """Request body for registering an agent; the id is server-assigned."""

    name: str
    type: str
    capabilities: List[str]
    chain_id: str
    endpoint: str
    metadata: Optional[Dict[str, Any]] = {}
|
|
||||||
# API Endpoints
|
|
||||||
|
|
||||||
@app.post("/api/agents/register", response_model=Agent)
async def register_agent(agent: AgentRegistration):
    """Persist a new agent registration and return it with its UUID."""
    agent_id = str(uuid.uuid4())
    row = (
        agent_id, agent.name, agent.type,
        json.dumps(agent.capabilities), agent.chain_id,
        agent.endpoint, json.dumps(agent.metadata)
    )

    with get_db_connection() as conn:
        conn.execute('''
            INSERT INTO agents (id, name, type, capabilities, chain_id, endpoint, metadata)
            VALUES (?, ?, ?, ?, ?, ?, ?)
        ''', row)
        conn.commit()  # persist before the connection is closed

    return Agent(
        id=agent_id,
        name=agent.name,
        type=agent.type,
        capabilities=agent.capabilities,
        chain_id=agent.chain_id,
        endpoint=agent.endpoint,
        metadata=agent.metadata,
    )
|
|
||||||
@app.get("/api/agents", response_model=List[Agent])
async def list_agents(
    agent_type: Optional[str] = None,
    chain_id: Optional[str] = None,
    capability: Optional[str] = None
):
    """List active agents, optionally filtered by type, chain, or capability."""
    query = "SELECT * FROM agents WHERE status = 'active'"
    params = []
    if agent_type:
        query += " AND type = ?"
        params.append(agent_type)
    if chain_id:
        query += " AND chain_id = ?"
        params.append(chain_id)
    if capability:
        # Substring match against the JSON-encoded capability list.
        query += " AND capabilities LIKE ?"
        params.append(f'%{capability}%')

    with get_db_connection() as conn:
        rows = conn.execute(query, params).fetchall()

    return [
        Agent(
            id=row["id"],
            name=row["name"],
            type=row["type"],
            capabilities=json.loads(row["capabilities"]),
            chain_id=row["chain_id"],
            endpoint=row["endpoint"],
            metadata=json.loads(row["metadata"] or "{}"),
        )
        for row in rows
    ]
|
|
||||||
@app.get("/api/health")
async def health_check():
    """Liveness probe: report service health and the current UTC time."""
    return {"status": "ok", "timestamp": datetime.utcnow()}
|
|
||||||
if __name__ == "__main__":
|
|
||||||
import uvicorn
|
|
||||||
uvicorn.run(app, host="0.0.0.0", port=8013)
|
|
||||||
@@ -1,431 +0,0 @@
|
|||||||
"""
|
|
||||||
Agent Registration System
|
|
||||||
Handles AI agent registration, capability management, and discovery
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import time
|
|
||||||
import json
|
|
||||||
import hashlib
|
|
||||||
from typing import Dict, List, Optional, Set, Tuple
|
|
||||||
from dataclasses import dataclass, asdict
|
|
||||||
from enum import Enum
|
|
||||||
from decimal import Decimal
|
|
||||||
|
|
||||||
class AgentType(Enum):
    """Roles an AI agent can register as."""

    AI_MODEL = "ai_model"
    DATA_PROVIDER = "data_provider"
    VALIDATOR = "validator"
    MARKET_MAKER = "market_maker"
    BROKER = "broker"
    ORACLE = "oracle"
|
|
||||||
class AgentStatus(Enum):
    """Lifecycle states of a registered agent."""

    REGISTERED = "registered"
    ACTIVE = "active"
    INACTIVE = "inactive"
    SUSPENDED = "suspended"
    BANNED = "banned"
|
|
||||||
class CapabilityType(Enum):
    """Closed set of capability categories agents can advertise."""

    TEXT_GENERATION = "text_generation"
    IMAGE_GENERATION = "image_generation"
    DATA_ANALYSIS = "data_analysis"
    PREDICTION = "prediction"
    VALIDATION = "validation"
    COMPUTATION = "computation"
|
|
||||||
@dataclass
class AgentCapability:
    """One advertised capability of an agent, with pricing and limits."""

    capability_type: CapabilityType
    name: str
    version: str
    parameters: Dict            # capability-specific configuration
    performance_metrics: Dict   # reported performance figures
    cost_per_use: Decimal       # price charged per invocation
    availability: float         # defaults to 1.0 at creation; presumably 0.0-1.0
    max_concurrent_jobs: int
|
|
||||||
@dataclass
class AgentInfo:
    """Full registry record for one agent."""

    agent_id: str
    agent_type: AgentType
    name: str
    owner_address: str          # 0x-prefixed, 42-char address
    public_key: str
    endpoint_url: str
    capabilities: List[AgentCapability]
    reputation_score: float     # starts at 1.0 on registration
    total_jobs_completed: int
    total_earnings: Decimal
    registration_time: float    # epoch seconds (time.time())
    last_active: float          # epoch seconds, refreshed on updates
    status: AgentStatus
    metadata: Dict
|
|
||||||
class AgentRegistry:
|
|
||||||
"""Manages AI agent registration and discovery"""
|
|
||||||
|
|
||||||
def __init__(self):
    """Set up empty agent storage, lookup indexes, and registry parameters."""
    self.agents: Dict[str, AgentInfo] = {}
    self.capability_index: Dict[CapabilityType, Set[str]] = {}  # capability -> agent_ids
    self.type_index: Dict[AgentType, Set[str]] = {}             # agent_type -> agent_ids
    self.reputation_scores: Dict[str, float] = {}
    self.registration_queue: List[Dict] = []

    # Registry parameters
    self.min_reputation_threshold = 0.5
    self.max_agents_per_type = 1000
    self.registration_fee = Decimal('100.0')
    self.inactivity_threshold = 86400 * 7  # 7 days

    # Pre-create an empty bucket per capability and per type so index
    # lookups never need a missing-key branch.
    for cap_kind in CapabilityType:
        self.capability_index[cap_kind] = set()
    for agent_kind in AgentType:
        self.type_index[agent_kind] = set()
|
|
||||||
async def register_agent(self, agent_type: AgentType, name: str, owner_address: str,
                         public_key: str, endpoint_url: str, capabilities: List[Dict],
                         metadata: Dict = None) -> Tuple[bool, str, Optional[str]]:
    """Register a new AI agent.

    Returns:
        ``(success, message, agent_id)`` — *agent_id* is None on failure.
    """
    try:
        if not self._validate_registration_inputs(agent_type, name, owner_address,
                                                  public_key, endpoint_url):
            return False, "Invalid registration inputs", None

        # Duplicate check (id derives from owner + name + time).
        agent_id = self._generate_agent_id(owner_address, name)
        if agent_id in self.agents:
            return False, "Agent already registered", None

        # Enforce the per-type population cap.
        if len(self.type_index[agent_type]) >= self.max_agents_per_type:
            return False, f"Maximum agents of type {agent_type.value} reached", None

        # Parse capability dicts; invalid entries are silently dropped.
        parsed_caps = [
            cap
            for cap in (self._create_capability_from_data(c) for c in capabilities)
            if cap
        ]
        if not parsed_caps:
            return False, "Agent must have at least one valid capability", None

        info = AgentInfo(
            agent_id=agent_id,
            agent_type=agent_type,
            name=name,
            owner_address=owner_address,
            public_key=public_key,
            endpoint_url=endpoint_url,
            capabilities=parsed_caps,
            reputation_score=1.0,  # start with neutral reputation
            total_jobs_completed=0,
            total_earnings=Decimal('0'),
            registration_time=time.time(),
            last_active=time.time(),
            status=AgentStatus.REGISTERED,
            metadata=metadata or {}
        )

        # Store and index the new agent.
        self.agents[agent_id] = info
        self.type_index[agent_type].add(agent_id)
        for cap in parsed_caps:
            self.capability_index[cap.capability_type].add(agent_id)

        # NOTE(review): log_info is not defined in this module's visible
        # scope — presumably provided elsewhere in the file.
        log_info(f"Agent registered: {agent_id} ({name})")
        return True, "Registration successful", agent_id

    except Exception as e:
        return False, f"Registration failed: {str(e)}", None
|
|
||||||
def _validate_registration_inputs(self, agent_type: AgentType, name: str,
                                  owner_address: str, public_key: str, endpoint_url: str) -> bool:
    """Cheap sanity checks on registration fields; True when all pass."""
    # All fields must be present and truthy.
    if not all([agent_type, name, owner_address, public_key, endpoint_url]):
        return False

    # 0x-prefixed 42-character address (simplified format check).
    if not (owner_address.startswith('0x') and len(owner_address) == 42):
        return False

    # Endpoint must be an HTTP(S) URL (simplified format check).
    if not endpoint_url.startswith(('http://', 'https://')):
        return False

    # Name length bounds.
    return 3 <= len(name) <= 100
|
|
||||||
def _generate_agent_id(self, owner_address: str, name: str) -> str:
    """Derive a 16-hex-char agent id from owner, name, and the current time."""
    seed = f"{owner_address}:{name}:{time.time()}"
    return hashlib.sha256(seed.encode()).hexdigest()[:16]
|
|
||||||
def _create_capability_from_data(self, cap_data: Dict) -> Optional[AgentCapability]:
    """Build an AgentCapability from a raw dict, or None when invalid."""
    try:
        # All mandatory keys must be present.
        if any(key not in cap_data
               for key in ('type', 'name', 'version', 'cost_per_use')):
            return None

        # The type string must name a known CapabilityType member.
        try:
            cap_type = CapabilityType(cap_data['type'])
        except ValueError:
            return None

        return AgentCapability(
            capability_type=cap_type,
            name=cap_data['name'],
            version=cap_data['version'],
            parameters=cap_data.get('parameters', {}),
            performance_metrics=cap_data.get('performance_metrics', {}),
            cost_per_use=Decimal(str(cap_data['cost_per_use'])),
            availability=cap_data.get('availability', 1.0),
            max_concurrent_jobs=cap_data.get('max_concurrent_jobs', 1),
        )

    except Exception as e:
        # NOTE(review): log_error is not defined in this module's visible
        # scope — presumably provided elsewhere in the file.
        log_error(f"Error creating capability: {e}")
        return None
|
|
||||||
async def update_agent_status(self, agent_id: str, status: AgentStatus) -> Tuple[bool, str]:
    """Set an agent's status and refresh its last-active timestamp."""
    agent = self.agents.get(agent_id)
    if agent is None:
        return False, "Agent not found"

    previous = agent.status
    agent.status = status
    agent.last_active = time.time()

    log_info(f"Agent {agent_id} status changed: {previous.value} -> {status.value}")
    return True, "Status updated successfully"
|
|
||||||
async def update_agent_capabilities(self, agent_id: str, capabilities: List[Dict]) -> Tuple[bool, str]:
    """Replace an agent's capabilities, keeping the capability index in sync.

    NOTE(review): when no submitted capability is valid the old entries
    have already been dropped from the index while ``agent.capabilities``
    still holds them — behavior preserved from the original code.
    """
    agent = self.agents.get(agent_id)
    if agent is None:
        return False, "Agent not found"

    # Drop the stale index entries first.
    for stale in agent.capabilities:
        self.capability_index[stale.capability_type].discard(agent_id)

    # Parse and index the replacements; invalid dicts are skipped.
    fresh = []
    for cap_data in capabilities:
        cap = self._create_capability_from_data(cap_data)
        if cap:
            fresh.append(cap)
            self.capability_index[cap.capability_type].add(agent_id)

    if not fresh:
        return False, "No valid capabilities provided"

    agent.capabilities = fresh
    agent.last_active = time.time()
    return True, "Capabilities updated successfully"
|
|
||||||
async def find_agents_by_capability(self, capability_type: CapabilityType,
                                    filters: Dict = None) -> List[AgentInfo]:
    """Active agents offering *capability_type*, best reputation first."""
    candidate_ids = self.capability_index.get(capability_type, set())

    matches = []
    for candidate_id in candidate_ids:
        candidate = self.agents.get(candidate_id)
        if (candidate
                and candidate.status == AgentStatus.ACTIVE
                and self._matches_filters(candidate, filters)):
            matches.append(candidate)

    # Highest reputation first.
    matches.sort(key=lambda a: a.reputation_score, reverse=True)
    return matches
|
|
||||||
async def find_agents_by_type(self, agent_type: AgentType, filters: Dict = None) -> List[AgentInfo]:
    """Active agents of *agent_type*, best reputation first."""
    candidate_ids = self.type_index.get(agent_type, set())

    matches = []
    for candidate_id in candidate_ids:
        candidate = self.agents.get(candidate_id)
        if (candidate
                and candidate.status == AgentStatus.ACTIVE
                and self._matches_filters(candidate, filters)):
            matches.append(candidate)

    # Highest reputation first.
    matches.sort(key=lambda a: a.reputation_score, reverse=True)
    return matches
|
|
||||||
def _matches_filters(self, agent: AgentInfo, filters: Dict) -> bool:
    """True when *agent* satisfies every provided filter (no filters == match)."""
    if not filters:
        return True

    if ('min_reputation' in filters
            and agent.reputation_score < filters['min_reputation']):
        return False

    if 'max_cost_per_use' in filters:
        ceiling = Decimal(str(filters['max_cost_per_use']))
        # Reject if ANY capability is priced above the ceiling.
        if any(cap.cost_per_use > ceiling for cap in agent.capabilities):
            return False

    if 'min_availability' in filters:
        floor = filters['min_availability']
        # Reject if ANY capability falls below the availability floor.
        if any(cap.availability < floor for cap in agent.capabilities):
            return False

    # Location filter (if implemented) compares against metadata.
    if ('location' in filters
            and agent.metadata.get('location') != filters['location']):
        return False

    return True
|
|
||||||
async def get_agent_info(self, agent_id: str) -> Optional[AgentInfo]:
    """Look up an agent record by id; None when the id is unknown."""
    return self.agents.get(agent_id)
|
|
||||||
|
|
||||||
async def search_agents(self, query: str, limit: int = 50) -> List[AgentInfo]:
    """Case-insensitive substring search over ACTIVE agents.

    An agent matches when *query* appears in its name, in any capability
    name, or in any capability type value. Results are sorted by
    reputation (highest first) and truncated to *limit*.
    """
    needle = query.lower()

    def _matches(agent) -> bool:
        # A name hit wins immediately; otherwise scan the capabilities.
        if needle in agent.name.lower():
            return True
        return any(
            needle in cap.name.lower() or needle in cap.capability_type.value
            for cap in agent.capabilities
        )

    hits = [
        agent
        for agent in self.agents.values()
        if agent.status == AgentStatus.ACTIVE and _matches(agent)
    ]
    hits.sort(key=lambda a: a.reputation_score, reverse=True)
    return hits[:limit]
|
|
||||||
|
|
||||||
async def get_agent_statistics(self, agent_id: str) -> Optional[Dict]:
    """Return detailed per-agent statistics, or None for an unknown id.

    Monetary figures are converted to float for JSON-friendliness; both
    derived ratios guard against a zero denominator.
    """
    agent = self.agents.get(agent_id)
    if not agent:
        return None

    completed = agent.total_jobs_completed
    avg_job_earnings = agent.total_earnings / completed if completed > 0 else Decimal('0')
    days_active = (time.time() - agent.registration_time) / 86400
    jobs_per_day = completed / days_active if days_active > 0 else 0

    return {
        'agent_id': agent_id,
        'name': agent.name,
        'type': agent.agent_type.value,
        'status': agent.status.value,
        'reputation_score': agent.reputation_score,
        'total_jobs_completed': completed,
        'total_earnings': float(agent.total_earnings),
        'avg_job_earnings': float(avg_job_earnings),
        'jobs_per_day': jobs_per_day,
        'days_active': int(days_active),
        'capabilities_count': len(agent.capabilities),
        'last_active': agent.last_active,
        'registration_time': agent.registration_time
    }
|
|
||||||
|
|
||||||
async def get_registry_statistics(self) -> Dict:
    """Aggregate registry-wide counters and averages.

    Per-type and per-capability populations are read from the secondary
    indexes; reputation and earnings are aggregated over all agents
    regardless of status.
    """
    all_agents = list(self.agents.values())
    total_agents = len(all_agents)
    active_agents = sum(1 for a in all_agents if a.status == AgentStatus.ACTIVE)

    # Count by type / capability straight from the indexes.
    type_counts = {t.value: len(self.type_index[t]) for t in AgentType}
    capability_counts = {c.value: len(self.capability_index[c]) for c in CapabilityType}

    # Reputation statistics (0 when the registry is empty).
    reputations = [a.reputation_score for a in all_agents]
    avg_reputation = sum(reputations) / len(reputations) if reputations else 0

    total_earnings = sum(a.total_earnings for a in all_agents)

    return {
        'total_agents': total_agents,
        'active_agents': active_agents,
        'inactive_agents': total_agents - active_agents,
        'agent_types': type_counts,
        'capabilities': capability_counts,
        'average_reputation': avg_reputation,
        'total_earnings': float(total_earnings),
        'registration_fee': float(self.registration_fee)
    }
|
|
||||||
|
|
||||||
async def cleanup_inactive_agents(self) -> Tuple[int, str]:
    """Remove agents INACTIVE for longer than ``inactivity_threshold``.

    Keeps the type and capability indexes consistent with the primary
    agent map. Returns ``(count, human-readable summary)``.
    """
    now = time.time()

    # Collect first, then delete, so we never mutate while scanning.
    stale_ids = [
        agent_id
        for agent_id, agent in list(self.agents.items())
        if agent.status == AgentStatus.INACTIVE
        and now - agent.last_active > self.inactivity_threshold
    ]

    for agent_id in stale_ids:
        agent = self.agents.pop(agent_id)
        # Keep the secondary indexes in sync with the primary map.
        self.type_index[agent.agent_type].discard(agent_id)
        for capability in agent.capabilities:
            self.capability_index[capability.capability_type].discard(agent_id)

    cleaned_count = len(stale_ids)
    if cleaned_count > 0:
        log_info(f"Cleaned up {cleaned_count} inactive agents")

    return cleaned_count, f"Cleaned up {cleaned_count} inactive agents"
|
|
||||||
|
|
||||||
# --- Module-level singleton -------------------------------------------------
# A single process-wide AgentRegistry instance, created by
# create_agent_registry() and read through get_agent_registry().
agent_registry: Optional[AgentRegistry] = None


def get_agent_registry() -> Optional[AgentRegistry]:
    """Return the global agent registry, or None if not yet created."""
    return agent_registry


def create_agent_registry() -> AgentRegistry:
    """Create a fresh AgentRegistry, install it globally, and return it."""
    global agent_registry
    agent_registry = AgentRegistry()
    return agent_registry
|
|
||||||
@@ -1,166 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Trading Agent
|
|
||||||
Automated trading agent for AITBC marketplace
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
from typing import Dict, Any, List
|
|
||||||
from datetime import datetime
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
# Add parent directory to path
|
|
||||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
|
||||||
|
|
||||||
from apps.agent_services.agent_bridge.src.integration_layer import AgentServiceBridge
|
|
||||||
|
|
||||||
class TradingAgent:
    """Automated trading agent.

    Registers itself with the AgentServiceBridge on start(), then (via
    run_trading_loop) repeatedly requests a market analysis for each
    configured symbol and submits a buy/sell task when the analysis
    recommends trading.
    """

    def __init__(self, agent_id: str, config: Dict[str, Any]):
        self.agent_id = agent_id
        self.config = config
        self.bridge = AgentServiceBridge()
        # Controls run_trading_loop(); toggled by start()/stop().
        self.is_running = False
        self.trading_strategy = config.get("strategy", "basic")
        self.symbols = config.get("symbols", ["AITBC/BTC"])
        self.trade_interval = config.get("trade_interval", 60)  # seconds

    async def start(self) -> bool:
        """Register with the service bridge; True on success, False otherwise."""
        try:
            # Register with service bridge
            # NOTE(review): the endpoint f-string has no placeholders.
            success = await self.bridge.start_agent(self.agent_id, {
                "type": "trading",
                "capabilities": ["market_analysis", "trading", "risk_management"],
                "endpoint": f"http://localhost:8005"
            })

            if success:
                self.is_running = True
                print(f"Trading agent {self.agent_id} started successfully")
                return True
            else:
                print(f"Failed to start trading agent {self.agent_id}")
                return False
        except Exception as e:
            print(f"Error starting trading agent: {e}")
            return False

    async def stop(self) -> bool:
        """Stop the trading loop and deregister from the bridge."""
        self.is_running = False
        success = await self.bridge.stop_agent(self.agent_id)
        if success:
            print(f"Trading agent {self.agent_id} stopped successfully")
        return success

    async def run_trading_loop(self):
        """Main trading loop: analyse every symbol, then sleep trade_interval."""
        while self.is_running:
            try:
                for symbol in self.symbols:
                    await self._analyze_and_trade(symbol)

                await asyncio.sleep(self.trade_interval)
            except Exception as e:
                # Errors are logged and the loop keeps running after a pause.
                print(f"Error in trading loop: {e}")
                await asyncio.sleep(10)  # Wait before retrying

    async def _analyze_and_trade(self, symbol: str) -> None:
        """Request a market analysis for *symbol* and trade on its recommendation."""
        try:
            # Perform market analysis
            analysis_task = {
                "type": "market_analysis",
                "symbol": symbol,
                "strategy": self.trading_strategy
            }

            analysis_result = await self.bridge.execute_agent_task(self.agent_id, analysis_task)

            if analysis_result.get("status") == "success":
                analysis = analysis_result["result"]["analysis"]

                # Make trading decision
                if self._should_trade(analysis):
                    await self._execute_trade(symbol, analysis)
            else:
                print(f"Market analysis failed for {symbol}: {analysis_result}")

        except Exception as e:
            print(f"Error in analyze_and_trade for {symbol}: {e}")

    def _should_trade(self, analysis: Dict[str, Any]) -> bool:
        """Trade only on an explicit buy/sell recommendation; default is hold."""
        recommendation = analysis.get("recommendation", "hold")
        return recommendation in ["buy", "sell"]

    async def _execute_trade(self, symbol: str, analysis: Dict[str, Any]) -> None:
        """Build and submit a trading task matching the analysis recommendation."""
        try:
            recommendation = analysis.get("recommendation", "hold")

            if recommendation == "buy":
                trade_task = {
                    "type": "trading",
                    "symbol": symbol,
                    "side": "buy",
                    "amount": self.config.get("trade_amount", 0.1),
                    "strategy": self.trading_strategy
                }
            elif recommendation == "sell":
                trade_task = {
                    "type": "trading",
                    "symbol": symbol,
                    "side": "sell",
                    "amount": self.config.get("trade_amount", 0.1),
                    "strategy": self.trading_strategy
                }
            else:
                # "hold" (or anything unexpected): do nothing.
                return

            trade_result = await self.bridge.execute_agent_task(self.agent_id, trade_task)

            if trade_result.get("status") == "success":
                print(f"Trade executed successfully: {trade_result}")
            else:
                print(f"Trade execution failed: {trade_result}")

        except Exception as e:
            print(f"Error executing trade: {e}")

    async def get_status(self) -> Dict[str, Any]:
        """Get agent status from the service bridge."""
        return await self.bridge.get_agent_status(self.agent_id)
|
|
||||||
|
|
||||||
# Main execution
|
|
||||||
async def main():
    """Entry point: build a TradingAgent, start it, and trade until interrupted."""
    config = {
        "strategy": "basic",
        "symbols": ["AITBC/BTC"],
        "trade_interval": 30,
        "trade_amount": 0.1,
    }
    agent = TradingAgent("trading-agent-001", config)

    # Bail out early if the agent cannot register with the bridge.
    if not await agent.start():
        print("Failed to start trading agent")
        return

    try:
        # Run trading loop until interrupted.
        await agent.run_trading_loop()
    except KeyboardInterrupt:
        print("Shutting down trading agent...")
    finally:
        await agent.stop()
|
|
||||||
|
|
||||||
# Run the trading agent standalone when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
|
|
||||||
@@ -1,229 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Integration Layer
|
|
||||||
Connects agent protocols to existing AITBC services
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import aiohttp
|
|
||||||
import json
|
|
||||||
from typing import Dict, Any, List, Optional
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
class AITBCServiceIntegration:
    """Async HTTP integration layer for AITBC services.

    Use as an async context manager so the underlying aiohttp session is
    opened and closed around each batch of calls::

        async with AITBCServiceIntegration() as integration:
            info = await integration.get_blockchain_info()

    All public methods return the decoded JSON body on success, or an
    ``{"error": ..., "status": ...}`` dict on failure — they never raise.
    """

    def __init__(self):
        # Default local-development endpoints for each AITBC service.
        self.service_endpoints = {
            "coordinator_api": "http://localhost:8000",
            "blockchain_rpc": "http://localhost:8006",
            "exchange_service": "http://localhost:8001",
            "marketplace": "http://localhost:8002",
            "agent_registry": "http://localhost:8013"
        }
        # Created in __aenter__, closed in __aexit__.
        self.session = None

    async def __aenter__(self):
        self.session = aiohttp.ClientSession()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        if self.session:
            await self.session.close()

    async def _get_json(self, url: str, error_status: str) -> Dict[str, Any]:
        """GET *url* and decode JSON; map any failure to an error dict."""
        try:
            async with self.session.get(url) as response:
                return await response.json()
        except Exception as e:
            return {"error": str(e), "status": error_status}

    async def _post_json(self, url: str, payload: Dict[str, Any]) -> Dict[str, Any]:
        """POST *payload* as JSON to *url*; map any failure to an error dict."""
        try:
            async with self.session.post(url, json=payload) as response:
                return await response.json()
        except Exception as e:
            return {"error": str(e), "status": "failed"}

    async def get_blockchain_info(self) -> Dict[str, Any]:
        """Get blockchain node health information."""
        return await self._get_json(
            f"{self.service_endpoints['blockchain_rpc']}/health", "unavailable")

    async def get_exchange_status(self) -> Dict[str, Any]:
        """Get exchange service status."""
        return await self._get_json(
            f"{self.service_endpoints['exchange_service']}/api/health", "unavailable")

    async def get_coordinator_status(self) -> Dict[str, Any]:
        """Get coordinator API status."""
        return await self._get_json(
            f"{self.service_endpoints['coordinator_api']}/health", "unavailable")

    async def submit_transaction(self, transaction_data: Dict[str, Any]) -> Dict[str, Any]:
        """Submit a transaction to the blockchain RPC service."""
        return await self._post_json(
            f"{self.service_endpoints['blockchain_rpc']}/rpc/submit", transaction_data)

    async def get_market_data(self, symbol: str = "AITBC/BTC") -> Dict[str, Any]:
        """Get market data for *symbol* from the exchange."""
        return await self._get_json(
            f"{self.service_endpoints['exchange_service']}/api/market/{symbol}", "failed")

    async def register_agent_with_coordinator(self, agent_data: Dict[str, Any]) -> Dict[str, Any]:
        """Register an agent with the agent registry service."""
        return await self._post_json(
            f"{self.service_endpoints['agent_registry']}/api/agents/register", agent_data)
|
|
||||||
|
|
||||||
class AgentServiceBridge:
    """Bridge between agents and AITBC services.

    Tracks active agents in an in-memory dict and routes their tasks to
    the appropriate AITBC service through AITBCServiceIntegration.
    """

    def __init__(self):
        self.integration = AITBCServiceIntegration()
        # agent_id -> {"config": ..., "registration": ..., "started_at": ...}
        self.active_agents = {}

    async def start_agent(self, agent_id: str, agent_config: Dict[str, Any]) -> bool:
        """Register *agent_id* with the coordinator and track it locally.

        Returns True on successful registration, False otherwise.
        """
        try:
            # Register agent with coordinator
            async with self.integration as integration:
                registration_result = await integration.register_agent_with_coordinator({
                    "name": agent_id,
                    "type": agent_config.get("type", "generic"),
                    "capabilities": agent_config.get("capabilities", []),
                    "chain_id": agent_config.get("chain_id", "ait-mainnet"),
                    "endpoint": agent_config.get("endpoint", f"http://localhost:{8000 + len(self.active_agents) + 10}")
                })

                # The registry returns the created agent dict on success, not a {"status": "ok"} wrapper
                if registration_result and "id" in registration_result:
                    self.active_agents[agent_id] = {
                        "config": agent_config,
                        "registration": registration_result,
                        "started_at": datetime.utcnow()
                    }
                    return True
                else:
                    print(f"Registration failed: {registration_result}")
                    return False
        except Exception as e:
            print(f"Failed to start agent {agent_id}: {e}")
            return False

    async def stop_agent(self, agent_id: str) -> bool:
        """Forget a locally tracked agent; False if it was not tracked."""
        if agent_id in self.active_agents:
            del self.active_agents[agent_id]
            return True
        return False

    async def get_agent_status(self, agent_id: str) -> Dict[str, Any]:
        """Report an agent's status together with backing-service health."""
        if agent_id not in self.active_agents:
            return {"status": "not_found"}

        agent_info = self.active_agents[agent_id]

        async with self.integration as integration:
            # Get service statuses
            blockchain_status = await integration.get_blockchain_info()
            exchange_status = await integration.get_exchange_status()
            coordinator_status = await integration.get_coordinator_status()

            return {
                "agent_id": agent_id,
                "status": "active",
                "started_at": agent_info["started_at"].isoformat(),
                "services": {
                    "blockchain": blockchain_status,
                    "exchange": exchange_status,
                    "coordinator": coordinator_status
                }
            }

    async def execute_agent_task(self, agent_id: str, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Dispatch *task_data* to the handler matching its "type" field."""
        if agent_id not in self.active_agents:
            return {"status": "error", "message": "Agent not found"}

        task_type = task_data.get("type")

        if task_type == "market_analysis":
            return await self._execute_market_analysis(task_data)
        elif task_type == "trading":
            return await self._execute_trading_task(task_data)
        elif task_type == "compliance_check":
            return await self._execute_compliance_check(task_data)
        else:
            return {"status": "error", "message": f"Unknown task type: {task_type}"}

    async def _execute_market_analysis(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Fetch market data and return an analysis envelope.

        NOTE(review): the analysis values are hard-coded placeholders.
        """
        try:
            async with self.integration as integration:
                market_data = await integration.get_market_data(task_data.get("symbol", "AITBC/BTC"))

                # Perform basic analysis
                analysis_result = {
                    "symbol": task_data.get("symbol", "AITBC/BTC"),
                    "market_data": market_data,
                    "analysis": {
                        "trend": "neutral",
                        "volatility": "medium",
                        "recommendation": "hold"
                    },
                    "timestamp": datetime.utcnow().isoformat()
                }

                return {"status": "success", "result": analysis_result}
        except Exception as e:
            return {"status": "error", "message": str(e)}

    async def _execute_trading_task(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Build a trade transaction from *task_data* and submit it on-chain."""
        try:
            # Get market data first
            async with self.integration as integration:
                market_data = await integration.get_market_data(task_data.get("symbol", "AITBC/BTC"))

                # Create transaction; price falls back to the market price,
                # then to 0.001 when the market data has no price field.
                transaction = {
                    "type": "trade",
                    "symbol": task_data.get("symbol", "AITBC/BTC"),
                    "side": task_data.get("side", "buy"),
                    "amount": task_data.get("amount", 0.1),
                    "price": task_data.get("price", market_data.get("price", 0.001))
                }

                # Submit transaction
                tx_result = await integration.submit_transaction(transaction)

                return {"status": "success", "transaction": tx_result}
        except Exception as e:
            return {"status": "error", "message": str(e)}

    async def _execute_compliance_check(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Run a compliance check for the given user.

        NOTE(review): always reports "passed"; real checks not implemented.
        """
        try:
            # Basic compliance check
            compliance_result = {
                "user_id": task_data.get("user_id"),
                "check_type": task_data.get("check_type", "basic"),
                "status": "passed",
                "checks_performed": ["kyc", "aml", "sanctions"],
                "timestamp": datetime.utcnow().isoformat()
            }

            return {"status": "success", "result": compliance_result}
        except Exception as e:
            return {"status": "error", "message": str(e)}
|
|
||||||
@@ -1,149 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Compliance Agent
|
|
||||||
Automated compliance and regulatory monitoring agent
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
from typing import Dict, Any, List
|
|
||||||
from datetime import datetime
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
# Add parent directory to path
|
|
||||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
|
||||||
|
|
||||||
from apps.agent_services.agent_bridge.src.integration_layer import AgentServiceBridge
|
|
||||||
|
|
||||||
class ComplianceAgent:
    """Automated compliance agent.

    Periodically runs a compliance check for each monitored entity via
    the AgentServiceBridge and raises an alert when a check fails.
    """

    def __init__(self, agent_id: str, config: Dict[str, Any]):
        self.agent_id = agent_id
        self.config = config
        self.bridge = AgentServiceBridge()
        # Controls run_compliance_loop(); toggled by start()/stop().
        self.is_running = False
        self.check_interval = config.get("check_interval", 300)  # 5 minutes
        self.monitored_entities = config.get("monitored_entities", [])

    async def start(self) -> bool:
        """Register with the service bridge; True on success, False otherwise."""
        try:
            # NOTE(review): the endpoint f-string has no placeholders.
            success = await self.bridge.start_agent(self.agent_id, {
                "type": "compliance",
                "capabilities": ["kyc_check", "aml_screening", "regulatory_reporting"],
                "endpoint": f"http://localhost:8006"
            })

            if success:
                self.is_running = True
                print(f"Compliance agent {self.agent_id} started successfully")
                return True
            else:
                print(f"Failed to start compliance agent {self.agent_id}")
                return False
        except Exception as e:
            print(f"Error starting compliance agent: {e}")
            return False

    async def stop(self) -> bool:
        """Stop the monitoring loop and deregister from the bridge."""
        self.is_running = False
        success = await self.bridge.stop_agent(self.agent_id)
        if success:
            print(f"Compliance agent {self.agent_id} stopped successfully")
        return success

    async def run_compliance_loop(self):
        """Main loop: check every monitored entity, then sleep check_interval."""
        while self.is_running:
            try:
                for entity in self.monitored_entities:
                    await self._perform_compliance_check(entity)

                await asyncio.sleep(self.check_interval)
            except Exception as e:
                # Errors are logged and the loop keeps running after a pause.
                print(f"Error in compliance loop: {e}")
                await asyncio.sleep(30)  # Wait before retrying

    async def _perform_compliance_check(self, entity_id: str) -> None:
        """Run a full compliance check for *entity_id* and handle its result."""
        try:
            compliance_task = {
                "type": "compliance_check",
                "user_id": entity_id,
                "check_type": "full",
                "monitored_activities": ["trading", "transfers", "wallet_creation"]
            }

            result = await self.bridge.execute_agent_task(self.agent_id, compliance_task)

            if result.get("status") == "success":
                compliance_result = result["result"]
                await self._handle_compliance_result(entity_id, compliance_result)
            else:
                print(f"Compliance check failed for {entity_id}: {result}")

        except Exception as e:
            print(f"Error performing compliance check for {entity_id}: {e}")

    async def _handle_compliance_result(self, entity_id: str, result: Dict[str, Any]) -> None:
        """React to a compliance result: log it and alert on failure."""
        status = result.get("status", "unknown")

        if status == "passed":
            print(f"✅ Compliance check passed for {entity_id}")
        elif status == "failed":
            print(f"❌ Compliance check failed for {entity_id}")
            # Trigger alert or further investigation
            await self._trigger_compliance_alert(entity_id, result)
        else:
            print(f"⚠️ Compliance check inconclusive for {entity_id}")

    async def _trigger_compliance_alert(self, entity_id: str, result: Dict[str, Any]) -> None:
        """Emit a high-severity compliance alert for *entity_id*."""
        alert_data = {
            "entity_id": entity_id,
            "alert_type": "compliance_failure",
            "severity": "high",
            "details": result,
            "timestamp": datetime.utcnow().isoformat()
        }

        # In a real implementation, this would send to alert system
        print(f"🚨 COMPLIANCE ALERT: {json.dumps(alert_data, indent=2)}")

    async def get_status(self) -> Dict[str, Any]:
        """Bridge status augmented with this agent's monitoring configuration."""
        status = await self.bridge.get_agent_status(self.agent_id)
        status["monitored_entities"] = len(self.monitored_entities)
        status["check_interval"] = self.check_interval
        return status
|
|
||||||
|
|
||||||
# Main execution
|
|
||||||
async def main():
    """Entry point: build a ComplianceAgent and monitor until interrupted."""
    config = {
        "check_interval": 60,  # 1 minute for testing
        "monitored_entities": ["user001", "user002", "user003"]
    }
    agent = ComplianceAgent("compliance-agent-001", config)

    # Bail out early if the agent cannot register with the bridge.
    if not await agent.start():
        print("Failed to start compliance agent")
        return

    try:
        # Run compliance loop until interrupted.
        await agent.run_compliance_loop()
    except KeyboardInterrupt:
        print("Shutting down compliance agent...")
    finally:
        await agent.stop()
|
|
||||||
|
|
||||||
# Run the compliance agent standalone when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
|
|
||||||
@@ -1,132 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Coordinator Service
|
|
||||||
Agent task coordination and management
|
|
||||||
"""
|
|
||||||
|
|
||||||
from fastapi import FastAPI, HTTPException
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from typing import List, Optional, Dict, Any
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
import sqlite3
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from contextlib import asynccontextmanager
|
|
||||||
|
|
||||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: create the SQLite schema before serving."""
    # Startup
    init_db()
    yield
    # Shutdown (cleanup if needed)
    pass
|
|
||||||
|
|
||||||
# Application instance; the lifespan hook initialises the SQLite schema on startup.
app = FastAPI(title="AITBC Agent Coordinator API", version="1.0.0", lifespan=lifespan)
|
|
||||||
|
|
||||||
# Database setup
|
|
||||||
def get_db():
    """Open a new connection to the coordinator database.

    Rows are returned as :class:`sqlite3.Row` so columns can be accessed
    by name. The caller owns (and must close) the connection.
    """
    connection = sqlite3.connect('agent_coordinator.db')
    connection.row_factory = sqlite3.Row  # name-based column access
    return connection
|
|
||||||
|
|
||||||
@contextmanager
def get_db_connection():
    """Yield a coordinator DB connection that is always closed on exit."""
    connection = get_db()
    try:
        yield connection
    finally:
        connection.close()
|
|
||||||
|
|
||||||
# Initialize database
|
|
||||||
# Initialize database
def init_db():
    """Create the tasks table if it does not already exist (idempotent).

    payload and required_capabilities are stored as JSON-encoded TEXT.
    """
    with get_db_connection() as conn:
        conn.execute('''
            CREATE TABLE IF NOT EXISTS tasks (
                id TEXT PRIMARY KEY,
                task_type TEXT NOT NULL,
                payload TEXT NOT NULL,
                required_capabilities TEXT NOT NULL,
                priority TEXT NOT NULL,
                status TEXT NOT NULL,
                assigned_agent_id TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                result TEXT
            )
        ''')
|
|
||||||
|
|
||||||
# Models
|
|
||||||
class Task(BaseModel):
    """A coordinator task as stored in the tasks table and returned by the API."""
    id: str                                  # UUID assigned at creation
    task_type: str
    payload: Dict[str, Any]                  # task-specific JSON payload
    required_capabilities: List[str]
    priority: str                            # e.g. "normal" (TaskCreation default)
    status: str                              # "pending" when first created
    assigned_agent_id: Optional[str] = None  # not set by create_task; populated elsewhere
|
|
||||||
|
|
||||||
class TaskCreation(BaseModel):
    """Request body for POST /api/tasks."""
    task_type: str
    payload: Dict[str, Any]
    required_capabilities: List[str]
    priority: str = "normal"
|
|
||||||
|
|
||||||
# API Endpoints
|
|
||||||
|
|
||||||
@app.post("/api/tasks", response_model=Task)
async def create_task(task: TaskCreation):
    """Create a new task in the "pending" state and persist it.

    Returns the stored task, including its generated UUID.
    """
    task_id = str(uuid.uuid4())

    with get_db_connection() as conn:
        conn.execute('''
            INSERT INTO tasks (id, task_type, payload, required_capabilities, priority, status)
            VALUES (?, ?, ?, ?, ?, ?)
        ''', (
            task_id, task.task_type, json.dumps(task.payload),
            json.dumps(task.required_capabilities), task.priority, "pending"
        ))
        # BUGFIX: sqlite3 opens an implicit transaction for INSERT; without an
        # explicit commit the row is rolled back when the connection closes,
        # so created tasks were never persisted.
        conn.commit()

    return Task(
        id=task_id,
        task_type=task.task_type,
        payload=task.payload,
        required_capabilities=task.required_capabilities,
        priority=task.priority,
        status="pending"
    )
|
|
||||||
|
|
||||||
@app.get("/api/tasks", response_model=List[Task])
async def list_tasks(status: Optional[str] = None):
    """List tasks, optionally restricted to a single status value."""
    with get_db_connection() as conn:
        # Parameterised query; the optional status filter is appended safely.
        if status:
            rows = conn.execute(
                "SELECT * FROM tasks WHERE status = ?", [status]).fetchall()
        else:
            rows = conn.execute("SELECT * FROM tasks", []).fetchall()

    # JSON columns are decoded back into Python structures here.
    return [
        Task(
            id=row["id"],
            task_type=row["task_type"],
            payload=json.loads(row["payload"]),
            required_capabilities=json.loads(row["required_capabilities"]),
            priority=row["priority"],
            status=row["status"],
            assigned_agent_id=row["assigned_agent_id"],
        )
        for row in rows
    ]
|
|
||||||
|
|
||||||
@app.get("/api/health")
async def health_check():
    """Liveness probe returning service status and the current UTC time."""
    now = datetime.utcnow()
    return {"status": "ok", "timestamp": now}
|
|
||||||
|
|
||||||
# Run the coordinator service directly with uvicorn for local development.
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8012)
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
# AITBC Agent Protocols Environment Configuration
|
|
||||||
# Copy this file to .env and update with your secure values
|
|
||||||
|
|
||||||
# Agent Protocol Encryption Key (generate a strong, unique key)
|
|
||||||
AITBC_AGENT_PROTOCOL_KEY=your-secure-encryption-key-here
|
|
||||||
|
|
||||||
# Agent Protocol Salt (generate a unique salt value)
|
|
||||||
AITBC_AGENT_PROTOCOL_SALT=your-unique-salt-value-here
|
|
||||||
|
|
||||||
# Agent Registry Configuration
|
|
||||||
AGENT_REGISTRY_HOST=0.0.0.0
|
|
||||||
AGENT_REGISTRY_PORT=8003
|
|
||||||
|
|
||||||
# Database Configuration
|
|
||||||
AGENT_REGISTRY_DB_PATH=agent_registry.db
|
|
||||||
|
|
||||||
# Security Settings
|
|
||||||
AGENT_PROTOCOL_TIMEOUT=300
|
|
||||||
AGENT_PROTOCOL_MAX_RETRIES=3
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
"""
|
|
||||||
Agent Protocols Package
|
|
||||||
"""
|
|
||||||
|
|
||||||
from .message_protocol import MessageProtocol, MessageTypes, AgentMessageClient
|
|
||||||
from .task_manager import TaskManager, TaskStatus, TaskPriority, Task
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"MessageProtocol",
|
|
||||||
"MessageTypes",
|
|
||||||
"AgentMessageClient",
|
|
||||||
"TaskManager",
|
|
||||||
"TaskStatus",
|
|
||||||
"TaskPriority",
|
|
||||||
"Task"
|
|
||||||
]
|
|
||||||
@@ -1,113 +0,0 @@
|
|||||||
"""
|
|
||||||
Message Protocol for AITBC Agents
|
|
||||||
Handles message creation, routing, and delivery between agents
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Dict, Any, Optional, List
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
class MessageTypes(Enum):
    """Message type enumeration.

    The enum *value* (lower-snake string) is what is serialized into a
    message envelope's "message_type" field by MessageProtocol.
    """
    TASK_REQUEST = "task_request"    # ask another agent to perform work
    TASK_RESPONSE = "task_response"  # reply carrying a task outcome
    HEARTBEAT = "heartbeat"          # liveness signal
    STATUS_UPDATE = "status_update"  # progress/state notification
    ERROR = "error"                  # error report
    DATA = "data"                    # generic data payload
|
|
||||||
|
|
||||||
class MessageProtocol:
    """Message protocol handler for agent communication.

    Keeps an in-memory, append-only log of every message created and tracks
    each message's delivery status ("pending" -> "sent" -> "received").
    """

    def __init__(self):
        self.messages = []          # all envelopes ever created, in order
        self.message_handlers = {}  # reserved for type -> handler dispatch

    def create_message(
        self,
        sender_id: str,
        receiver_id: str,
        message_type: MessageTypes,
        content: Dict[str, Any],
        message_id: Optional[str] = None
    ) -> Dict[str, Any]:
        """Build a new message envelope, record it, and return it."""
        envelope = {
            "message_id": str(uuid.uuid4()) if message_id is None else message_id,
            "sender_id": sender_id,
            "receiver_id": receiver_id,
            "message_type": message_type.value,
            "content": content,
            "timestamp": datetime.utcnow().isoformat(),
            "status": "pending",
        }
        self.messages.append(envelope)
        return envelope

    def send_message(self, message: Dict[str, Any]) -> bool:
        """Mark *message* as sent; returns False if stamping the dict fails."""
        try:
            message["status"] = "sent"
            message["sent_timestamp"] = datetime.utcnow().isoformat()
        except Exception:
            message["status"] = "failed"
            return False
        return True

    def receive_message(self, message_id: str) -> Optional[Dict[str, Any]]:
        """Mark the message with *message_id* as received and return it.

        Returns None when no message with that id exists.
        """
        match = next(
            (m for m in self.messages if m["message_id"] == message_id),
            None,
        )
        if match is not None:
            match["status"] = "received"
            match["received_timestamp"] = datetime.utcnow().isoformat()
        return match

    def get_messages_by_agent(self, agent_id: str) -> List[Dict[str, Any]]:
        """Every message in which *agent_id* is the sender or the receiver."""
        return [
            m for m in self.messages
            if agent_id in (m["sender_id"], m["receiver_id"])
        ]
|
|
||||||
|
|
||||||
class AgentMessageClient:
    """Convenience wrapper binding one agent id to a shared MessageProtocol."""

    def __init__(self, agent_id: str, protocol: MessageProtocol):
        self.agent_id = agent_id
        self.protocol = protocol
        self.received_messages = []  # envelopes already consumed by receive_messages()

    def send_message(
        self,
        receiver_id: str,
        message_type: MessageTypes,
        content: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Create and dispatch a message from this agent; returns the envelope."""
        outgoing = self.protocol.create_message(
            sender_id=self.agent_id,
            receiver_id=receiver_id,
            message_type=message_type,
            content=content,
        )
        self.protocol.send_message(outgoing)
        return outgoing

    def receive_messages(self) -> List[Dict[str, Any]]:
        """Consume and return all sent-but-unseen messages addressed to this agent."""
        fresh = []
        for candidate in self.protocol.messages:
            addressed_here = candidate["receiver_id"] == self.agent_id
            # NOTE(review): membership test compares whole dicts, O(n) per
            # message — fine at small scale; a set of seen ids would scale better.
            unseen = candidate not in self.received_messages
            if addressed_here and candidate["status"] == "sent" and unseen:
                self.protocol.receive_message(candidate["message_id"])
                self.received_messages.append(candidate)
                fresh.append(candidate)
        return fresh
|
|
||||||
@@ -1,128 +0,0 @@
|
|||||||
"""
|
|
||||||
Task Manager for AITBC Agents
|
|
||||||
Handles task creation, assignment, and tracking
|
|
||||||
"""
|
|
||||||
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
from typing import Dict, Any, Optional, List
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
class TaskStatus(Enum):
    """Task status enumeration (lifecycle states used by TaskManager)."""
    PENDING = "pending"          # created, not yet started
    IN_PROGRESS = "in_progress"  # actively being worked on
    COMPLETED = "completed"      # finished successfully (result populated)
    FAILED = "failed"            # finished with an error (error populated)
    CANCELLED = "cancelled"      # abandoned before completion
|
|
||||||
|
|
||||||
class TaskPriority(Enum):
    """Task priority enumeration, lowest to highest urgency."""
    LOW = "low"
    MEDIUM = "medium"  # default priority for new tasks
    HIGH = "high"
    URGENT = "urgent"
|
|
||||||
|
|
||||||
class Task:
    """In-memory task record: assignment, lifecycle timestamps, and outcome."""

    def __init__(
        self,
        task_id: str,
        title: str,
        description: str,
        assigned_to: str,
        priority: TaskPriority = TaskPriority.MEDIUM,
        created_by: Optional[str] = None
    ):
        # Identity / descriptive fields
        self.task_id = task_id
        self.title = title
        self.description = description
        # Assignment: the creator defaults to the assignee when not given.
        self.assigned_to = assigned_to
        self.created_by = created_by or assigned_to
        self.priority = priority
        # Lifecycle state
        self.status = TaskStatus.PENDING
        self.created_at = datetime.utcnow()
        self.updated_at = datetime.utcnow()
        self.completed_at = None
        # Outcome — populated later by TaskManager.update_task_status().
        self.result = None
        self.error = None
|
|
||||||
|
|
||||||
class TaskManager:
    """Coordinates task lifecycle across agents: creation, lookup, tracking."""

    def __init__(self):
        self.tasks = {}         # task_id -> Task
        self.task_history = []  # reserved for archived tasks

    def create_task(
        self,
        title: str,
        description: str,
        assigned_to: str,
        priority: TaskPriority = TaskPriority.MEDIUM,
        created_by: Optional[str] = None
    ) -> Task:
        """Create, register, and return a new task with a random UUID id."""
        new_task = Task(
            task_id=str(uuid.uuid4()),
            title=title,
            description=description,
            assigned_to=assigned_to,
            priority=priority,
            created_by=created_by,
        )
        self.tasks[new_task.task_id] = new_task
        return new_task

    def get_task(self, task_id: str) -> Optional[Task]:
        """Look up a task by id; None when unknown."""
        return self.tasks.get(task_id)

    def update_task_status(
        self,
        task_id: str,
        status: TaskStatus,
        result: Optional[Dict[str, Any]] = None,
        error: Optional[str] = None
    ) -> bool:
        """Transition a task to *status*; returns False for unknown ids.

        COMPLETED records *result* and the completion time; FAILED records
        *error*. Other statuses only refresh updated_at.
        """
        task = self.tasks.get(task_id)
        if task is None:
            return False

        task.status = status
        task.updated_at = datetime.utcnow()
        if status is TaskStatus.COMPLETED:
            task.completed_at = datetime.utcnow()
            task.result = result
        elif status is TaskStatus.FAILED:
            task.error = error
        return True

    def get_tasks_by_agent(self, agent_id: str) -> List[Task]:
        """All tasks assigned to *agent_id*."""
        return [t for t in self.tasks.values() if t.assigned_to == agent_id]

    def get_tasks_by_status(self, status: TaskStatus) -> List[Task]:
        """All tasks currently in *status*."""
        return [t for t in self.tasks.values() if t.status == status]

    def get_overdue_tasks(self, hours: int = 24) -> List[Task]:
        """Unfinished (pending/in-progress) tasks created more than *hours* ago."""
        cutoff = datetime.utcnow() - timedelta(hours=hours)
        open_states = (TaskStatus.PENDING, TaskStatus.IN_PROGRESS)
        return [
            t for t in self.tasks.values()
            if t.status in open_states and t.created_at < cutoff
        ]
|
|
||||||
@@ -1,151 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Registry Service
|
|
||||||
Central agent discovery and registration system
|
|
||||||
"""
|
|
||||||
|
|
||||||
from fastapi import FastAPI, HTTPException, Depends
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from typing import List, Optional, Dict, Any
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
import sqlite3
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from contextlib import asynccontextmanager
|
|
||||||
|
|
||||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan handler: prepare the database before serving."""
    init_db()  # startup: ensure the schema exists
    yield
    # shutdown: nothing to clean up currently
|
|
||||||
|
|
||||||
app = FastAPI(title="AITBC Agent Registry API", version="1.0.0", lifespan=lifespan)
|
|
||||||
|
|
||||||
# Database setup
|
|
||||||
def get_db():
    """Open a new SQLite connection to the agent registry database.

    The database path comes from the AGENT_REGISTRY_DB_PATH environment
    variable (declared in the project's .env example), defaulting to the
    previous hard-coded 'agent_registry.db' for backward compatibility.
    Rows are returned as sqlite3.Row so columns are addressable by name.
    """
    import os  # local import: 'os' is not imported at module level

    db_path = os.environ.get("AGENT_REGISTRY_DB_PATH", "agent_registry.db")
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    return conn
|
|
||||||
|
|
||||||
@contextmanager
def get_db_connection():
    """Yield a registry database connection, guaranteeing it is closed on exit."""
    db = get_db()
    try:
        yield db
    finally:
        db.close()
|
|
||||||
|
|
||||||
# Initialize database
|
|
||||||
def init_db():
    """Create the ``agents`` table if it does not already exist.

    Idempotent (CREATE TABLE IF NOT EXISTS); invoked once at application
    startup from the FastAPI lifespan handler.
    """
    # NOTE(review): no explicit conn.commit() after the DDL — relies on the
    # sqlite3 module's implicit transaction handling for DDL statements;
    # verify the table persists across connections.
    with get_db_connection() as conn:
        conn.execute('''
            CREATE TABLE IF NOT EXISTS agents (
                id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                type TEXT NOT NULL,
                capabilities TEXT NOT NULL,
                chain_id TEXT NOT NULL,
                endpoint TEXT NOT NULL,
                status TEXT DEFAULT 'active',
                last_heartbeat TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                metadata TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''')
|
|
||||||
|
|
||||||
# Models
|
|
||||||
class Agent(BaseModel):
    """A registered agent as returned by the registry API."""

    id: str  # server-generated UUID4
    name: str
    type: str  # free-form agent category (e.g. "trading")
    capabilities: List[str]  # stored JSON-encoded in the SQLite row
    chain_id: str
    endpoint: str  # base URL where the agent can be reached
    # Pydantic copies field defaults per instance, so the shared {} is safe here.
    metadata: Optional[Dict[str, Any]] = {}
|
|
||||||
|
|
||||||
class AgentRegistration(BaseModel):
    """Request body for /api/agents/register — same shape as Agent minus id."""

    name: str
    type: str  # free-form agent category (e.g. "trading")
    capabilities: List[str]
    chain_id: str
    endpoint: str  # base URL where the agent can be reached
    # Pydantic copies field defaults per instance, so the shared {} is safe here.
    metadata: Optional[Dict[str, Any]] = {}
|
|
||||||
|
|
||||||
# API Endpoints
|
|
||||||
|
|
||||||
@app.post("/api/agents/register", response_model=Agent)
|
|
||||||
async def register_agent(agent: AgentRegistration):
|
|
||||||
"""Register a new agent"""
|
|
||||||
agent_id = str(uuid.uuid4())
|
|
||||||
|
|
||||||
with get_db_connection() as conn:
|
|
||||||
conn.execute('''
|
|
||||||
INSERT INTO agents (id, name, type, capabilities, chain_id, endpoint, metadata)
|
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?)
|
|
||||||
''', (
|
|
||||||
agent_id, agent.name, agent.type,
|
|
||||||
json.dumps(agent.capabilities), agent.chain_id,
|
|
||||||
agent.endpoint, json.dumps(agent.metadata)
|
|
||||||
))
|
|
||||||
conn.commit()
|
|
||||||
|
|
||||||
return Agent(
|
|
||||||
id=agent_id,
|
|
||||||
name=agent.name,
|
|
||||||
type=agent.type,
|
|
||||||
capabilities=agent.capabilities,
|
|
||||||
chain_id=agent.chain_id,
|
|
||||||
endpoint=agent.endpoint,
|
|
||||||
metadata=agent.metadata
|
|
||||||
)
|
|
||||||
|
|
||||||
@app.get("/api/agents", response_model=List[Agent])
|
|
||||||
async def list_agents(
|
|
||||||
agent_type: Optional[str] = None,
|
|
||||||
chain_id: Optional[str] = None,
|
|
||||||
capability: Optional[str] = None
|
|
||||||
):
|
|
||||||
"""List registered agents with optional filters"""
|
|
||||||
with get_db_connection() as conn:
|
|
||||||
query = "SELECT * FROM agents WHERE status = 'active'"
|
|
||||||
params = []
|
|
||||||
|
|
||||||
if agent_type:
|
|
||||||
query += " AND type = ?"
|
|
||||||
params.append(agent_type)
|
|
||||||
|
|
||||||
if chain_id:
|
|
||||||
query += " AND chain_id = ?"
|
|
||||||
params.append(chain_id)
|
|
||||||
|
|
||||||
if capability:
|
|
||||||
query += " AND capabilities LIKE ?"
|
|
||||||
params.append(f'%{capability}%')
|
|
||||||
|
|
||||||
agents = conn.execute(query, params).fetchall()
|
|
||||||
|
|
||||||
return [
|
|
||||||
Agent(
|
|
||||||
id=agent["id"],
|
|
||||||
name=agent["name"],
|
|
||||||
type=agent["type"],
|
|
||||||
capabilities=json.loads(agent["capabilities"]),
|
|
||||||
chain_id=agent["chain_id"],
|
|
||||||
endpoint=agent["endpoint"],
|
|
||||||
metadata=json.loads(agent["metadata"] or "{}")
|
|
||||||
)
|
|
||||||
for agent in agents
|
|
||||||
]
|
|
||||||
|
|
||||||
@app.get("/api/health")
|
|
||||||
async def health_check():
|
|
||||||
"""Health check endpoint"""
|
|
||||||
return {"status": "ok", "timestamp": datetime.utcnow()}
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
import uvicorn
|
|
||||||
uvicorn.run(app, host="0.0.0.0", port=8013)
|
|
||||||
@@ -1,431 +0,0 @@
|
|||||||
"""
|
|
||||||
Agent Registration System
|
|
||||||
Handles AI agent registration, capability management, and discovery
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import time
|
|
||||||
import json
|
|
||||||
import hashlib
|
|
||||||
from typing import Dict, List, Optional, Set, Tuple
|
|
||||||
from dataclasses import dataclass, asdict
|
|
||||||
from enum import Enum
|
|
||||||
from decimal import Decimal
|
|
||||||
|
|
||||||
class AgentType(Enum):
    """Categories of agents the registry tracks (value is the wire string)."""
    AI_MODEL = "ai_model"
    DATA_PROVIDER = "data_provider"
    VALIDATOR = "validator"
    MARKET_MAKER = "market_maker"
    BROKER = "broker"
    ORACLE = "oracle"
|
|
||||||
|
|
||||||
class AgentStatus(Enum):
    """Agent lifecycle states; only ACTIVE agents appear in discovery results."""
    REGISTERED = "registered"  # initial state after register_agent()
    ACTIVE = "active"
    INACTIVE = "inactive"      # eligible for cleanup after the inactivity threshold
    SUSPENDED = "suspended"
    BANNED = "banned"
|
|
||||||
|
|
||||||
class CapabilityType(Enum):
    """Closed set of capability categories an agent may advertise."""
    TEXT_GENERATION = "text_generation"
    IMAGE_GENERATION = "image_generation"
    DATA_ANALYSIS = "data_analysis"
    PREDICTION = "prediction"
    VALIDATION = "validation"
    COMPUTATION = "computation"
|
|
||||||
|
|
||||||
@dataclass
class AgentCapability:
    """A single advertised capability of an agent."""

    capability_type: CapabilityType
    name: str
    version: str
    parameters: Dict             # capability-specific configuration; schema not enforced here
    performance_metrics: Dict    # self-reported metrics; schema not enforced here
    cost_per_use: Decimal        # price charged per invocation
    availability: float          # presumably a 0.0-1.0 uptime fraction — TODO confirm
    max_concurrent_jobs: int
|
|
||||||
|
|
||||||
@dataclass
class AgentInfo:
    """Full registry record for one agent (in-memory; not persisted here)."""

    agent_id: str                # 16-hex-char id from AgentRegistry._generate_agent_id
    agent_type: AgentType
    name: str
    owner_address: str           # 0x-prefixed, 42-char address (validated at registration)
    public_key: str
    endpoint_url: str            # http(s) URL (validated at registration)
    capabilities: List[AgentCapability]
    reputation_score: float      # starts at 1.0 on registration
    total_jobs_completed: int
    total_earnings: Decimal
    registration_time: float     # Unix timestamp (time.time())
    last_active: float           # Unix timestamp, refreshed on updates
    status: AgentStatus
    metadata: Dict               # free-form; 'location' key is used by discovery filters
|
|
||||||
|
|
||||||
class AgentRegistry:
    """Manages AI agent registration and discovery.

    All state lives in memory: a primary ``agents`` map plus secondary
    indexes by agent type and by capability type so discovery queries avoid
    full scans. Not thread-safe; assumed to be driven from a single event
    loop.

    Fix applied: ``update_agent_capabilities`` previously discarded the old
    capability-index entries *before* validating the new payload, so an
    invalid payload left ``capability_index`` inconsistent with
    ``agent.capabilities``. Validation now happens first.
    """

    def __init__(self):
        self.agents: Dict[str, AgentInfo] = {}
        self.capability_index: Dict[CapabilityType, Set[str]] = {}  # capability -> agent_ids
        self.type_index: Dict[AgentType, Set[str]] = {}  # agent_type -> agent_ids
        self.reputation_scores: Dict[str, float] = {}
        self.registration_queue: List[Dict] = []

        # Registry parameters
        self.min_reputation_threshold = 0.5
        self.max_agents_per_type = 1000
        self.registration_fee = Decimal('100.0')
        self.inactivity_threshold = 86400 * 7  # 7 days

        # Pre-create an empty bucket per capability/type so index lookups
        # never need a missing-key branch.
        for capability_type in CapabilityType:
            self.capability_index[capability_type] = set()
        for agent_type in AgentType:
            self.type_index[agent_type] = set()

    async def register_agent(self, agent_type: AgentType, name: str, owner_address: str,
                             public_key: str, endpoint_url: str, capabilities: List[Dict],
                             metadata: Dict = None) -> Tuple[bool, str, Optional[str]]:
        """Register a new AI agent.

        Returns ``(ok, human-readable message, agent_id or None)``.
        """
        try:
            if not self._validate_registration_inputs(agent_type, name, owner_address, public_key, endpoint_url):
                return False, "Invalid registration inputs", None

            # NOTE(review): the id hash includes time.time(), so the same
            # owner/name pair produces a new id on every call and this
            # duplicate check can never fire — confirm whether ids should be
            # deterministic from owner+name instead.
            agent_id = self._generate_agent_id(owner_address, name)
            if agent_id in self.agents:
                return False, "Agent already registered", None

            if len(self.type_index[agent_type]) >= self.max_agents_per_type:
                return False, f"Maximum agents of type {agent_type.value} reached", None

            # Parse capability dicts; malformed entries are dropped.
            agent_capabilities = []
            for cap_data in capabilities:
                capability = self._create_capability_from_data(cap_data)
                if capability:
                    agent_capabilities.append(capability)

            if not agent_capabilities:
                return False, "Agent must have at least one valid capability", None

            agent_info = AgentInfo(
                agent_id=agent_id,
                agent_type=agent_type,
                name=name,
                owner_address=owner_address,
                public_key=public_key,
                endpoint_url=endpoint_url,
                capabilities=agent_capabilities,
                reputation_score=1.0,  # Start with neutral reputation
                total_jobs_completed=0,
                total_earnings=Decimal('0'),
                registration_time=time.time(),
                last_active=time.time(),
                status=AgentStatus.REGISTERED,
                metadata=metadata or {}
            )

            # Add to the registry and both secondary indexes.
            self.agents[agent_id] = agent_info
            self.type_index[agent_type].add(agent_id)
            for capability in agent_capabilities:
                self.capability_index[capability.capability_type].add(agent_id)

            # NOTE(review): log_info/log_error are not imported in this module
            # chunk — presumably provided elsewhere in the package; verify.
            log_info(f"Agent registered: {agent_id} ({name})")
            return True, "Registration successful", agent_id

        except Exception as e:
            return False, f"Registration failed: {str(e)}", None

    def _validate_registration_inputs(self, agent_type: AgentType, name: str,
                                      owner_address: str, public_key: str, endpoint_url: str) -> bool:
        """Cheap syntactic validation of all registration fields."""
        if not all([agent_type, name, owner_address, public_key, endpoint_url]):
            return False

        # 0x-prefixed, 42-character address (20-byte hex plus prefix).
        if not owner_address.startswith('0x') or len(owner_address) != 42:
            return False

        if not endpoint_url.startswith(('http://', 'https://')):
            return False

        if len(name) < 3 or len(name) > 100:
            return False

        return True

    def _generate_agent_id(self, owner_address: str, name: str) -> str:
        """Generate a 16-hex-char agent id from owner, name, and current time."""
        content = f"{owner_address}:{name}:{time.time()}"
        return hashlib.sha256(content.encode()).hexdigest()[:16]

    def _create_capability_from_data(self, cap_data: Dict) -> Optional[AgentCapability]:
        """Build an AgentCapability from a dict; None when it is malformed."""
        try:
            required_fields = ['type', 'name', 'version', 'cost_per_use']
            if not all(field in cap_data for field in required_fields):
                return None

            try:
                capability_type = CapabilityType(cap_data['type'])
            except ValueError:
                return None

            return AgentCapability(
                capability_type=capability_type,
                name=cap_data['name'],
                version=cap_data['version'],
                parameters=cap_data.get('parameters', {}),
                performance_metrics=cap_data.get('performance_metrics', {}),
                cost_per_use=Decimal(str(cap_data['cost_per_use'])),
                availability=cap_data.get('availability', 1.0),
                max_concurrent_jobs=cap_data.get('max_concurrent_jobs', 1)
            )

        except Exception as e:
            log_error(f"Error creating capability: {e}")
            return None

    async def update_agent_status(self, agent_id: str, status: AgentStatus) -> Tuple[bool, str]:
        """Set an agent's status and refresh its last-active timestamp."""
        if agent_id not in self.agents:
            return False, "Agent not found"

        agent = self.agents[agent_id]
        old_status = agent.status
        agent.status = status
        agent.last_active = time.time()

        log_info(f"Agent {agent_id} status changed: {old_status.value} -> {status.value}")
        return True, "Status updated successfully"

    async def update_agent_capabilities(self, agent_id: str, capabilities: List[Dict]) -> Tuple[bool, str]:
        """Replace an agent's capability list and re-index it.

        The new payload is validated BEFORE any index mutation (bug fix —
        see class docstring), so a rejected payload leaves the registry
        untouched.
        """
        if agent_id not in self.agents:
            return False, "Agent not found"

        agent = self.agents[agent_id]

        # Validate first — do not touch indexes until we know the payload
        # contains at least one usable capability.
        new_capabilities = []
        for cap_data in capabilities:
            capability = self._create_capability_from_data(cap_data)
            if capability:
                new_capabilities.append(capability)

        if not new_capabilities:
            return False, "No valid capabilities provided"

        # Swap index entries: old out, new in.
        for old_capability in agent.capabilities:
            self.capability_index[old_capability.capability_type].discard(agent_id)
        for capability in new_capabilities:
            self.capability_index[capability.capability_type].add(agent_id)

        agent.capabilities = new_capabilities
        agent.last_active = time.time()

        return True, "Capabilities updated successfully"

    async def find_agents_by_capability(self, capability_type: CapabilityType,
                                        filters: Dict = None) -> List[AgentInfo]:
        """Active agents offering *capability_type*, best reputation first."""
        return self._collect_active(self.capability_index.get(capability_type, set()), filters)

    async def find_agents_by_type(self, agent_type: AgentType, filters: Dict = None) -> List[AgentInfo]:
        """Active agents of *agent_type*, best reputation first."""
        return self._collect_active(self.type_index.get(agent_type, set()), filters)

    def _collect_active(self, agent_ids: Set[str], filters: Optional[Dict]) -> List[AgentInfo]:
        """Shared finder body: keep ACTIVE agents matching *filters*, sort by reputation desc."""
        agents = [
            agent
            for agent in (self.agents.get(agent_id) for agent_id in agent_ids)
            if agent and agent.status == AgentStatus.ACTIVE and self._matches_filters(agent, filters)
        ]
        agents.sort(key=lambda x: x.reputation_score, reverse=True)
        return agents

    def _matches_filters(self, agent: AgentInfo, filters: Dict) -> bool:
        """Check if agent matches filters (absent/empty filters match everything)."""
        if not filters:
            return True

        # Reputation floor.
        if 'min_reputation' in filters:
            if agent.reputation_score < filters['min_reputation']:
                return False

        # Cost cap: reject if ANY capability exceeds it (strict semantics,
        # preserved from the original implementation).
        if 'max_cost_per_use' in filters:
            max_cost = Decimal(str(filters['max_cost_per_use']))
            if any(cap.cost_per_use > max_cost for cap in agent.capabilities):
                return False

        # Availability floor: reject if ANY capability falls below it.
        if 'min_availability' in filters:
            min_availability = filters['min_availability']
            if any(cap.availability < min_availability for cap in agent.capabilities):
                return False

        # Exact-match location filter against agent metadata.
        if 'location' in filters:
            if agent.metadata.get('location') != filters['location']:
                return False

        return True

    async def get_agent_info(self, agent_id: str) -> Optional[AgentInfo]:
        """Get agent information (None when unknown)."""
        return self.agents.get(agent_id)

    async def search_agents(self, query: str, limit: int = 50) -> List[AgentInfo]:
        """Case-insensitive substring search over agent names and capabilities."""
        query_lower = query.lower()
        results = []

        for agent in self.agents.values():
            if agent.status != AgentStatus.ACTIVE:
                continue

            # Name match takes precedence; 'continue' also prevents duplicates.
            if query_lower in agent.name.lower():
                results.append(agent)
                continue

            for capability in agent.capabilities:
                if (query_lower in capability.name.lower() or
                        query_lower in capability.capability_type.value):
                    results.append(agent)
                    break

        # Sort by relevance proxy (reputation).
        results.sort(key=lambda x: x.reputation_score, reverse=True)
        return results[:limit]

    async def get_agent_statistics(self, agent_id: str) -> Optional[Dict]:
        """Per-agent derived statistics; None when the agent is unknown."""
        agent = self.agents.get(agent_id)
        if not agent:
            return None

        # Guard against division by zero for brand-new agents.
        avg_job_earnings = agent.total_earnings / agent.total_jobs_completed if agent.total_jobs_completed > 0 else Decimal('0')
        days_active = (time.time() - agent.registration_time) / 86400
        jobs_per_day = agent.total_jobs_completed / days_active if days_active > 0 else 0

        return {
            'agent_id': agent_id,
            'name': agent.name,
            'type': agent.agent_type.value,
            'status': agent.status.value,
            'reputation_score': agent.reputation_score,
            'total_jobs_completed': agent.total_jobs_completed,
            'total_earnings': float(agent.total_earnings),
            'avg_job_earnings': float(avg_job_earnings),
            'jobs_per_day': jobs_per_day,
            'days_active': int(days_active),
            'capabilities_count': len(agent.capabilities),
            'last_active': agent.last_active,
            'registration_time': agent.registration_time
        }

    async def get_registry_statistics(self) -> Dict:
        """Registry-wide aggregate statistics."""
        total_agents = len(self.agents)
        active_agents = len([a for a in self.agents.values() if a.status == AgentStatus.ACTIVE])

        type_counts = {t.value: len(self.type_index[t]) for t in AgentType}
        capability_counts = {c.value: len(self.capability_index[c]) for c in CapabilityType}

        reputations = [a.reputation_score for a in self.agents.values()]
        avg_reputation = sum(reputations) / len(reputations) if reputations else 0

        total_earnings = sum(a.total_earnings for a in self.agents.values())

        return {
            'total_agents': total_agents,
            'active_agents': active_agents,
            'inactive_agents': total_agents - active_agents,
            'agent_types': type_counts,
            'capabilities': capability_counts,
            'average_reputation': avg_reputation,
            'total_earnings': float(total_earnings),
            'registration_fee': float(self.registration_fee)
        }

    async def cleanup_inactive_agents(self) -> Tuple[int, str]:
        """Remove agents that have been INACTIVE longer than the threshold."""
        current_time = time.time()
        cleaned_count = 0

        # Iterate a snapshot since we delete entries while walking.
        for agent_id, agent in list(self.agents.items()):
            if (agent.status == AgentStatus.INACTIVE and
                    current_time - agent.last_active > self.inactivity_threshold):

                del self.agents[agent_id]
                self.type_index[agent.agent_type].discard(agent_id)
                for capability in agent.capabilities:
                    self.capability_index[capability.capability_type].discard(agent_id)

                cleaned_count += 1

        if cleaned_count > 0:
            log_info(f"Cleaned up {cleaned_count} inactive agents")

        return cleaned_count, f"Cleaned up {cleaned_count} inactive agents"
|
|
||||||
|
|
||||||
# Global agent registry
|
|
||||||
agent_registry: Optional[AgentRegistry] = None  # process-wide singleton


def get_agent_registry() -> Optional[AgentRegistry]:
    """Return the global registry, or None before create_agent_registry() runs."""
    return agent_registry
|
|
||||||
|
|
||||||
def create_agent_registry() -> AgentRegistry:
    """Create a fresh AgentRegistry, install it as the global singleton, return it."""
    global agent_registry
    agent_registry = AgentRegistry()
    return agent_registry
|
|
||||||
@@ -1,166 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Trading Agent
|
|
||||||
Automated trading agent for AITBC marketplace
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
from typing import Dict, Any, List
|
|
||||||
from datetime import datetime
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
# Add parent directory to path
|
|
||||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
|
||||||
|
|
||||||
from apps.agent_services.agent_bridge.src.integration_layer import AgentServiceBridge
|
|
||||||
|
|
||||||
class TradingAgent:
    """Automated trading agent for the AITBC marketplace.

    Periodically requests a market analysis for each configured symbol
    through the AgentServiceBridge and submits buy/sell orders when the
    analysis recommends them. All failures are printed, never raised,
    so the trading loop keeps running.
    """

    def __init__(self, agent_id: str, config: Dict[str, Any]):
        """
        Args:
            agent_id: Unique identifier used when registering with the bridge.
            config: Settings dict; recognized keys: "strategy", "symbols",
                "trade_interval" (seconds) and "trade_amount".
        """
        self.agent_id = agent_id
        self.config = config
        self.bridge = AgentServiceBridge()
        self.is_running = False
        self.trading_strategy = config.get("strategy", "basic")
        self.symbols = config.get("symbols", ["AITBC/BTC"])
        self.trade_interval = config.get("trade_interval", 60)  # seconds

    async def start(self) -> bool:
        """Register with the service bridge and mark the agent as running.

        Returns:
            True when registration succeeded, False otherwise.
        """
        try:
            # Fixed: the endpoint literal had an f-prefix with no placeholders.
            # NOTE(review): hard-coded endpoint — confirm before running
            # multiple trading agents on one host.
            success = await self.bridge.start_agent(self.agent_id, {
                "type": "trading",
                "capabilities": ["market_analysis", "trading", "risk_management"],
                "endpoint": "http://localhost:8005"
            })

            if success:
                self.is_running = True
                print(f"Trading agent {self.agent_id} started successfully")
                return True
            else:
                print(f"Failed to start trading agent {self.agent_id}")
                return False
        except Exception as e:
            print(f"Error starting trading agent: {e}")
            return False

    async def stop(self) -> bool:
        """Stop the trading loop and deregister from the bridge."""
        self.is_running = False
        success = await self.bridge.stop_agent(self.agent_id)
        if success:
            print(f"Trading agent {self.agent_id} stopped successfully")
        return success

    async def run_trading_loop(self):
        """Main trading loop: analyze/trade each symbol, sleep, repeat."""
        while self.is_running:
            try:
                for symbol in self.symbols:
                    await self._analyze_and_trade(symbol)

                await asyncio.sleep(self.trade_interval)
            except Exception as e:
                print(f"Error in trading loop: {e}")
                await asyncio.sleep(10)  # Wait before retrying

    async def _analyze_and_trade(self, symbol: str) -> None:
        """Request a market analysis for *symbol* and trade on its result."""
        try:
            analysis_task = {
                "type": "market_analysis",
                "symbol": symbol,
                "strategy": self.trading_strategy
            }

            analysis_result = await self.bridge.execute_agent_task(self.agent_id, analysis_task)

            if analysis_result.get("status") == "success":
                analysis = analysis_result["result"]["analysis"]

                # Make trading decision
                if self._should_trade(analysis):
                    await self._execute_trade(symbol, analysis)
            else:
                print(f"Market analysis failed for {symbol}: {analysis_result}")

        except Exception as e:
            print(f"Error in analyze_and_trade for {symbol}: {e}")

    def _should_trade(self, analysis: Dict[str, Any]) -> bool:
        """Return True when the analysis recommends an actionable side."""
        recommendation = analysis.get("recommendation", "hold")
        return recommendation in ["buy", "sell"]

    async def _execute_trade(self, symbol: str, analysis: Dict[str, Any]) -> None:
        """Submit a trade via the bridge when the recommendation is buy/sell.

        Errors are printed rather than raised so the loop keeps going.
        """
        try:
            recommendation = analysis.get("recommendation", "hold")
            if recommendation not in ("buy", "sell"):
                return

            # The original buy and sell branches built identical dicts except
            # for "side"; deduplicated by parameterizing on the recommendation.
            trade_task = {
                "type": "trading",
                "symbol": symbol,
                "side": recommendation,
                "amount": self.config.get("trade_amount", 0.1),
                "strategy": self.trading_strategy
            }

            trade_result = await self.bridge.execute_agent_task(self.agent_id, trade_task)

            if trade_result.get("status") == "success":
                print(f"Trade executed successfully: {trade_result}")
            else:
                print(f"Trade execution failed: {trade_result}")

        except Exception as e:
            print(f"Error executing trade: {e}")

    async def get_status(self) -> Dict[str, Any]:
        """Get agent status from the service bridge."""
        return await self.bridge.get_agent_status(self.agent_id)
|
|
||||||
|
|
||||||
# Main execution
async def main():
    """Entry point: build a TradingAgent from a static config and run it."""
    config = {
        "strategy": "basic",
        "symbols": ["AITBC/BTC"],
        "trade_interval": 30,
        "trade_amount": 0.1,
    }
    agent = TradingAgent("trading-agent-001", config)

    if not await agent.start():
        print("Failed to start trading agent")
        return

    try:
        # Run trading loop until interrupted.
        await agent.run_trading_loop()
    except KeyboardInterrupt:
        print("Shutting down trading agent...")
    finally:
        await agent.stop()


if __name__ == "__main__":
    asyncio.run(main())
|
|
||||||
@@ -1,229 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Integration Layer
|
|
||||||
Connects agent protocols to existing AITBC services
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import aiohttp
|
|
||||||
import json
|
|
||||||
from typing import Dict, Any, List, Optional
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
class AITBCServiceIntegration:
    """Async HTTP facade over the AITBC service endpoints.

    Use as an async context manager so the aiohttp session is opened on
    entry and closed on exit. Every call returns the decoded JSON body,
    or an {"error": ..., "status": ...} dict on any failure.
    """

    def __init__(self):
        # Known service locations (local development defaults).
        self.service_endpoints = {
            "coordinator_api": "http://localhost:8000",
            "blockchain_rpc": "http://localhost:8006",
            "exchange_service": "http://localhost:8001",
            "marketplace": "http://localhost:8002",
            "agent_registry": "http://localhost:8013"
        }
        self.session = None  # created lazily in __aenter__

    async def __aenter__(self):
        self.session = aiohttp.ClientSession()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        if self.session:
            await self.session.close()

    async def _get_json(self, url: str, fail_status: str) -> Dict[str, Any]:
        """GET *url* and decode JSON; map any failure to an error dict."""
        try:
            async with self.session.get(url) as response:
                return await response.json()
        except Exception as e:
            return {"error": str(e), "status": fail_status}

    async def _post_json(self, url: str, payload: Dict[str, Any]) -> Dict[str, Any]:
        """POST *payload* as JSON to *url*; map any failure to an error dict."""
        try:
            async with self.session.post(url, json=payload) as response:
                return await response.json()
        except Exception as e:
            return {"error": str(e), "status": "failed"}

    async def get_blockchain_info(self) -> Dict[str, Any]:
        """Get blockchain information"""
        endpoint = f"{self.service_endpoints['blockchain_rpc']}/health"
        return await self._get_json(endpoint, "unavailable")

    async def get_exchange_status(self) -> Dict[str, Any]:
        """Get exchange service status"""
        endpoint = f"{self.service_endpoints['exchange_service']}/api/health"
        return await self._get_json(endpoint, "unavailable")

    async def get_coordinator_status(self) -> Dict[str, Any]:
        """Get coordinator API status"""
        endpoint = f"{self.service_endpoints['coordinator_api']}/health"
        return await self._get_json(endpoint, "unavailable")

    async def submit_transaction(self, transaction_data: Dict[str, Any]) -> Dict[str, Any]:
        """Submit transaction to blockchain"""
        endpoint = f"{self.service_endpoints['blockchain_rpc']}/rpc/submit"
        return await self._post_json(endpoint, transaction_data)

    async def get_market_data(self, symbol: str = "AITBC/BTC") -> Dict[str, Any]:
        """Get market data from exchange"""
        endpoint = f"{self.service_endpoints['exchange_service']}/api/market/{symbol}"
        return await self._get_json(endpoint, "failed")

    async def register_agent_with_coordinator(self, agent_data: Dict[str, Any]) -> Dict[str, Any]:
        """Register agent with coordinator"""
        endpoint = f"{self.service_endpoints['agent_registry']}/api/agents/register"
        return await self._post_json(endpoint, agent_data)
|
|
||||||
|
|
||||||
class AgentServiceBridge:
    """Bridge between agents and AITBC services.

    Keeps a registry of active agents and routes their tasks to the
    appropriate service calls through AITBCServiceIntegration.
    """

    def __init__(self):
        self.integration = AITBCServiceIntegration()
        self.active_agents = {}  # agent_id -> {"config", "registration", "started_at"}

    async def start_agent(self, agent_id: str, agent_config: Dict[str, Any]) -> bool:
        """Register *agent_id* with the coordinator and record it locally.

        Returns True on success; failures are printed and reported as False.
        """
        try:
            async with self.integration as integration:
                registration = await integration.register_agent_with_coordinator({
                    "name": agent_id,
                    "type": agent_config.get("type", "generic"),
                    "capabilities": agent_config.get("capabilities", []),
                    "chain_id": agent_config.get("chain_id", "ait-mainnet"),
                    "endpoint": agent_config.get(
                        "endpoint",
                        f"http://localhost:{8000 + len(self.active_agents) + 10}"
                    )
                })

            # The registry returns the created agent dict on success, not a
            # {"status": "ok"} wrapper.
            if not (registration and "id" in registration):
                print(f"Registration failed: {registration}")
                return False

            self.active_agents[agent_id] = {
                "config": agent_config,
                "registration": registration,
                "started_at": datetime.utcnow()
            }
            return True
        except Exception as e:
            print(f"Failed to start agent {agent_id}: {e}")
            return False

    async def stop_agent(self, agent_id: str) -> bool:
        """Forget *agent_id*; True if it was active, False otherwise."""
        return self.active_agents.pop(agent_id, None) is not None

    async def get_agent_status(self, agent_id: str) -> Dict[str, Any]:
        """Status for *agent_id* plus health of the backing services."""
        if agent_id not in self.active_agents:
            return {"status": "not_found"}

        agent_info = self.active_agents[agent_id]

        async with self.integration as integration:
            services = {
                "blockchain": await integration.get_blockchain_info(),
                "exchange": await integration.get_exchange_status(),
                "coordinator": await integration.get_coordinator_status()
            }

        return {
            "agent_id": agent_id,
            "status": "active",
            "started_at": agent_info["started_at"].isoformat(),
            "services": services
        }

    async def execute_agent_task(self, agent_id: str, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Dispatch *task_data* to the handler matching its "type" key."""
        if agent_id not in self.active_agents:
            return {"status": "error", "message": "Agent not found"}

        handlers = {
            "market_analysis": self._execute_market_analysis,
            "trading": self._execute_trading_task,
            "compliance_check": self._execute_compliance_check,
        }
        task_type = task_data.get("type")
        handler = handlers.get(task_type)
        if handler is None:
            return {"status": "error", "message": f"Unknown task type: {task_type}"}
        return await handler(task_data)

    async def _execute_market_analysis(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Fetch market data and return a canned neutral analysis."""
        try:
            symbol = task_data.get("symbol", "AITBC/BTC")
            async with self.integration as integration:
                market_data = await integration.get_market_data(symbol)

            return {
                "status": "success",
                "result": {
                    "symbol": symbol,
                    "market_data": market_data,
                    # Placeholder analysis; a real strategy would derive these.
                    "analysis": {
                        "trend": "neutral",
                        "volatility": "medium",
                        "recommendation": "hold"
                    },
                    "timestamp": datetime.utcnow().isoformat()
                }
            }
        except Exception as e:
            return {"status": "error", "message": str(e)}

    async def _execute_trading_task(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Build a trade transaction and submit it to the blockchain RPC."""
        try:
            symbol = task_data.get("symbol", "AITBC/BTC")
            async with self.integration as integration:
                # Price falls back to the live quote, then a hard default.
                market_data = await integration.get_market_data(symbol)

                transaction = {
                    "type": "trade",
                    "symbol": symbol,
                    "side": task_data.get("side", "buy"),
                    "amount": task_data.get("amount", 0.1),
                    "price": task_data.get("price", market_data.get("price", 0.001))
                }

                tx_result = await integration.submit_transaction(transaction)

            return {"status": "success", "transaction": tx_result}
        except Exception as e:
            return {"status": "error", "message": str(e)}

    async def _execute_compliance_check(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Return a stub compliance result marking all checks as passed."""
        try:
            return {
                "status": "success",
                "result": {
                    "user_id": task_data.get("user_id"),
                    "check_type": task_data.get("check_type", "basic"),
                    "status": "passed",
                    "checks_performed": ["kyc", "aml", "sanctions"],
                    "timestamp": datetime.utcnow().isoformat()
                }
            }
        except Exception as e:
            return {"status": "error", "message": str(e)}
|
|
||||||
@@ -1,149 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Compliance Agent
|
|
||||||
Automated compliance and regulatory monitoring agent
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
from typing import Dict, Any, List
|
|
||||||
from datetime import datetime
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
# Add parent directory to path
|
|
||||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
|
||||||
|
|
||||||
from apps.agent_services.agent_bridge.src.integration_layer import AgentServiceBridge
|
|
||||||
|
|
||||||
class ComplianceAgent:
    """Automated compliance agent.

    Runs a periodic compliance check for every monitored entity via the
    AgentServiceBridge and raises an alert when a check fails.
    """

    def __init__(self, agent_id: str, config: Dict[str, Any]):
        self.agent_id = agent_id
        self.config = config
        self.bridge = AgentServiceBridge()
        self.is_running = False
        self.check_interval = config.get("check_interval", 300)  # seconds (default 5 minutes)
        self.monitored_entities = config.get("monitored_entities", [])

    async def start(self) -> bool:
        """Register with the service bridge; True on success."""
        try:
            registered = await self.bridge.start_agent(self.agent_id, {
                "type": "compliance",
                "capabilities": ["kyc_check", "aml_screening", "regulatory_reporting"],
                "endpoint": f"http://localhost:8006"
            })
        except Exception as e:
            print(f"Error starting compliance agent: {e}")
            return False

        if not registered:
            print(f"Failed to start compliance agent {self.agent_id}")
            return False

        self.is_running = True
        print(f"Compliance agent {self.agent_id} started successfully")
        return True

    async def stop(self) -> bool:
        """Deregister from the bridge and stop the monitoring loop."""
        self.is_running = False
        stopped = await self.bridge.stop_agent(self.agent_id)
        if stopped:
            print(f"Compliance agent {self.agent_id} stopped successfully")
        return stopped

    async def run_compliance_loop(self):
        """Check every monitored entity, sleep, repeat until stopped."""
        while self.is_running:
            try:
                for entity_id in self.monitored_entities:
                    await self._perform_compliance_check(entity_id)
                await asyncio.sleep(self.check_interval)
            except Exception as e:
                print(f"Error in compliance loop: {e}")
                await asyncio.sleep(30)  # Wait before retrying

    async def _perform_compliance_check(self, entity_id: str) -> None:
        """Run one full compliance check for *entity_id* via the bridge."""
        try:
            outcome = await self.bridge.execute_agent_task(self.agent_id, {
                "type": "compliance_check",
                "user_id": entity_id,
                "check_type": "full",
                "monitored_activities": ["trading", "transfers", "wallet_creation"]
            })

            if outcome.get("status") == "success":
                await self._handle_compliance_result(entity_id, outcome["result"])
            else:
                print(f"Compliance check failed for {entity_id}: {outcome}")
        except Exception as e:
            print(f"Error performing compliance check for {entity_id}: {e}")

    async def _handle_compliance_result(self, entity_id: str, result: Dict[str, Any]) -> None:
        """React to a compliance result: log it, and alert on failure."""
        status = result.get("status", "unknown")

        if status == "passed":
            print(f"✅ Compliance check passed for {entity_id}")
        elif status == "failed":
            print(f"❌ Compliance check failed for {entity_id}")
            # Trigger alert or further investigation
            await self._trigger_compliance_alert(entity_id, result)
        else:
            print(f"⚠️ Compliance check inconclusive for {entity_id}")

    async def _trigger_compliance_alert(self, entity_id: str, result: Dict[str, Any]) -> None:
        """Emit a high-severity alert (stubbed as a console print)."""
        alert_data = {
            "entity_id": entity_id,
            "alert_type": "compliance_failure",
            "severity": "high",
            "details": result,
            "timestamp": datetime.utcnow().isoformat()
        }
        # In a real implementation, this would send to alert system
        print(f"🚨 COMPLIANCE ALERT: {json.dumps(alert_data, indent=2)}")

    async def get_status(self) -> Dict[str, Any]:
        """Bridge status augmented with this agent's monitoring config."""
        status = await self.bridge.get_agent_status(self.agent_id)
        status["monitored_entities"] = len(self.monitored_entities)
        status["check_interval"] = self.check_interval
        return status
|
|
||||||
|
|
||||||
# Main execution
async def main():
    """Entry point: build a ComplianceAgent from a static config and run it."""
    config = {
        "check_interval": 60,  # 1 minute for testing
        "monitored_entities": ["user001", "user002", "user003"],
    }
    agent = ComplianceAgent("compliance-agent-001", config)

    if not await agent.start():
        print("Failed to start compliance agent")
        return

    try:
        # Run compliance loop until interrupted.
        await agent.run_compliance_loop()
    except KeyboardInterrupt:
        print("Shutting down compliance agent...")
    finally:
        await agent.stop()


if __name__ == "__main__":
    asyncio.run(main())
|
|
||||||
@@ -1,132 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Coordinator Service
|
|
||||||
Agent task coordination and management
|
|
||||||
"""
|
|
||||||
|
|
||||||
from fastapi import FastAPI, HTTPException
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from typing import List, Optional, Dict, Any
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
import sqlite3
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from contextlib import asynccontextmanager
|
|
||||||
|
|
||||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: ensure the SQLite schema exists before serving."""
    # Startup
    init_db()
    yield
    # Shutdown (cleanup if needed)
    pass


app = FastAPI(title="AITBC Agent Coordinator API", version="1.0.0", lifespan=lifespan)
|
|
||||||
|
|
||||||
# Database setup
def get_db():
    """Open a connection to the coordinator database with dict-like rows."""
    connection = sqlite3.connect('agent_coordinator.db')
    connection.row_factory = sqlite3.Row
    return connection


@contextmanager
def get_db_connection():
    """Yield a database connection, always closing it when the block exits."""
    connection = get_db()
    try:
        yield connection
    finally:
        connection.close()


# Initialize database
def init_db():
    """Create the tasks table if it does not already exist."""
    with get_db_connection() as conn:
        conn.execute('''
            CREATE TABLE IF NOT EXISTS tasks (
                id TEXT PRIMARY KEY,
                task_type TEXT NOT NULL,
                payload TEXT NOT NULL,
                required_capabilities TEXT NOT NULL,
                priority TEXT NOT NULL,
                status TEXT NOT NULL,
                assigned_agent_id TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                result TEXT
            )
        ''')
|
|
||||||
|
|
||||||
# Models
class Task(BaseModel):
    """A coordinator task record as returned by the API."""
    # Unique task identifier (UUID4 string generated by the server).
    id: str
    # Free-form task category, e.g. "market_analysis".
    task_type: str
    # Arbitrary task input data (stored as JSON text in SQLite).
    payload: Dict[str, Any]
    # Capabilities an agent must have to be assigned this task.
    required_capabilities: List[str]
    # Scheduling priority label, e.g. "normal".
    priority: str
    # Lifecycle state; new tasks start as "pending".
    status: str
    # Filled in once an agent picks the task up; None while unassigned.
    assigned_agent_id: Optional[str] = None


class TaskCreation(BaseModel):
    """Request body for creating a task; the server assigns id and status."""
    task_type: str
    payload: Dict[str, Any]
    required_capabilities: List[str]
    priority: str = "normal"
|
|
||||||
|
|
||||||
# API Endpoints
|
|
||||||
|
|
||||||
@app.post("/api/tasks", response_model=Task)
async def create_task(task: TaskCreation):
    """Create a new task in the "pending" state.

    The payload and capability list are stored as JSON text; the caller
    receives the full task record including its generated id.
    """
    task_id = str(uuid.uuid4())

    with get_db_connection() as conn:
        conn.execute('''
            INSERT INTO tasks (id, task_type, payload, required_capabilities, priority, status)
            VALUES (?, ?, ?, ?, ?, ?)
        ''', (
            task_id, task.task_type, json.dumps(task.payload),
            json.dumps(task.required_capabilities), task.priority, "pending"
        ))
        # BUG FIX: sqlite3 opens an implicit transaction for DML; without this
        # commit the INSERT was rolled back when the connection closed, so
        # created tasks never persisted.
        conn.commit()

    return Task(
        id=task_id,
        task_type=task.task_type,
        payload=task.payload,
        required_capabilities=task.required_capabilities,
        priority=task.priority,
        status="pending"
    )
|
|
||||||
|
|
||||||
@app.get("/api/tasks", response_model=List[Task])
async def list_tasks(status: Optional[str] = None):
    """List all tasks, optionally restricted to a single status."""
    sql = "SELECT * FROM tasks"
    args = []
    if status:
        sql += " WHERE status = ?"
        args.append(status)

    with get_db_connection() as conn:
        rows = conn.execute(sql, args).fetchall()

    # Re-hydrate the JSON-encoded columns into Python objects.
    return [
        Task(
            id=row["id"],
            task_type=row["task_type"],
            payload=json.loads(row["payload"]),
            required_capabilities=json.loads(row["required_capabilities"]),
            priority=row["priority"],
            status=row["status"],
            assigned_agent_id=row["assigned_agent_id"]
        )
        for row in rows
    ]
|
|
||||||
|
|
||||||
@app.get("/api/health")
async def health_check():
    """Health check endpoint.

    Returns a static "ok" status plus the current UTC server time.
    """
    return {"status": "ok", "timestamp": datetime.utcnow()}
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    # Run the coordinator standalone on all interfaces, port 8012.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8012)
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
# AITBC Agent Protocols Environment Configuration
|
|
||||||
# Copy this file to .env and update with your secure values
|
|
||||||
|
|
||||||
# Agent Protocol Encryption Key (generate a strong, unique key)
|
|
||||||
AITBC_AGENT_PROTOCOL_KEY=your-secure-encryption-key-here
|
|
||||||
|
|
||||||
# Agent Protocol Salt (generate a unique salt value)
|
|
||||||
AITBC_AGENT_PROTOCOL_SALT=your-unique-salt-value-here
|
|
||||||
|
|
||||||
# Agent Registry Configuration
|
|
||||||
AGENT_REGISTRY_HOST=0.0.0.0
|
|
||||||
AGENT_REGISTRY_PORT=8003
|
|
||||||
|
|
||||||
# Database Configuration
|
|
||||||
AGENT_REGISTRY_DB_PATH=agent_registry.db
|
|
||||||
|
|
||||||
# Security Settings
|
|
||||||
AGENT_PROTOCOL_TIMEOUT=300
|
|
||||||
AGENT_PROTOCOL_MAX_RETRIES=3
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
"""
|
|
||||||
Agent Protocols Package
|
|
||||||
"""
|
|
||||||
|
|
||||||
from .message_protocol import MessageProtocol, MessageTypes, AgentMessageClient
|
|
||||||
from .task_manager import TaskManager, TaskStatus, TaskPriority, Task
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"MessageProtocol",
|
|
||||||
"MessageTypes",
|
|
||||||
"AgentMessageClient",
|
|
||||||
"TaskManager",
|
|
||||||
"TaskStatus",
|
|
||||||
"TaskPriority",
|
|
||||||
"Task"
|
|
||||||
]
|
|
||||||
@@ -1,113 +0,0 @@
|
|||||||
"""
|
|
||||||
Message Protocol for AITBC Agents
|
|
||||||
Handles message creation, routing, and delivery between agents
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Dict, Any, Optional, List
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
class MessageTypes(Enum):
    """Kinds of messages agents exchange."""
    TASK_REQUEST = "task_request"
    TASK_RESPONSE = "task_response"
    HEARTBEAT = "heartbeat"
    STATUS_UPDATE = "status_update"
    ERROR = "error"
    DATA = "data"


class MessageProtocol:
    """In-memory message store and lifecycle tracker for agent communication.

    Messages move through statuses: "pending" -> "sent" -> "received".
    """

    def __init__(self):
        self.messages = []          # every message ever created, in order
        self.message_handlers = {}  # reserved for future dispatch hooks

    def create_message(
        self,
        sender_id: str,
        receiver_id: str,
        message_type: MessageTypes,
        content: Dict[str, Any],
        message_id: Optional[str] = None
    ) -> Dict[str, Any]:
        """Build, record, and return a new message dict in "pending" state."""
        new_message = {
            "message_id": message_id if message_id is not None else str(uuid.uuid4()),
            "sender_id": sender_id,
            "receiver_id": receiver_id,
            "message_type": message_type.value,
            "content": content,
            "timestamp": datetime.utcnow().isoformat(),
            "status": "pending"
        }
        self.messages.append(new_message)
        return new_message

    def send_message(self, message: Dict[str, Any]) -> bool:
        """Mark *message* as sent; False if marking failed."""
        try:
            message["status"] = "sent"
            message["sent_timestamp"] = datetime.utcnow().isoformat()
        except Exception:
            message["status"] = "failed"
            return False
        return True

    def receive_message(self, message_id: str) -> Optional[Dict[str, Any]]:
        """Mark the message with *message_id* as received and return it."""
        match = next(
            (m for m in self.messages if m["message_id"] == message_id), None)
        if match is not None:
            match["status"] = "received"
            match["received_timestamp"] = datetime.utcnow().isoformat()
        return match

    def get_messages_by_agent(self, agent_id: str) -> List[Dict[str, Any]]:
        """All messages where *agent_id* is the sender or the receiver."""
        return [
            m for m in self.messages
            if agent_id in (m["sender_id"], m["receiver_id"])
        ]
|
|
||||||
|
|
||||||
class AgentMessageClient:
    """Convenience wrapper binding one agent id to a MessageProtocol."""

    def __init__(self, agent_id: str, protocol: MessageProtocol):
        self.agent_id = agent_id
        self.protocol = protocol
        self.received_messages = []  # messages already consumed by this client

    def send_message(
        self,
        receiver_id: str,
        message_type: MessageTypes,
        content: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Create and send a message from this agent to *receiver_id*."""
        outgoing = self.protocol.create_message(
            sender_id=self.agent_id,
            receiver_id=receiver_id,
            message_type=message_type,
            content=content
        )
        self.protocol.send_message(outgoing)
        return outgoing

    def receive_messages(self) -> List[Dict[str, Any]]:
        """Consume and return messages addressed to this agent not yet seen."""
        fresh = []
        for msg in self.protocol.messages:
            is_new = (
                msg["receiver_id"] == self.agent_id
                and msg["status"] == "sent"
                and msg not in self.received_messages
            )
            if is_new:
                self.protocol.receive_message(msg["message_id"])
                self.received_messages.append(msg)
                fresh.append(msg)
        return fresh
|
|
||||||
@@ -1,128 +0,0 @@
|
|||||||
"""
|
|
||||||
Task Manager for AITBC Agents
|
|
||||||
Handles task creation, assignment, and tracking
|
|
||||||
"""
|
|
||||||
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
from typing import Dict, Any, Optional, List
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
class TaskStatus(Enum):
    """Lifecycle states a task can be in."""
    PENDING = "pending"
    IN_PROGRESS = "in_progress"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"


class TaskPriority(Enum):
    """Relative urgency levels for task scheduling."""
    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    URGENT = "urgent"


class Task:
    """A unit of work assigned to an agent.

    New tasks start PENDING; completion/failure metadata (result, error,
    completed_at) is filled in later by the task manager.
    """

    def __init__(
        self,
        task_id: str,
        title: str,
        description: str,
        assigned_to: str,
        priority: TaskPriority = TaskPriority.MEDIUM,
        created_by: Optional[str] = None
    ):
        self.task_id = task_id
        self.title = title
        self.description = description
        self.assigned_to = assigned_to
        self.priority = priority
        # Default the creator to the assignee when not supplied.
        self.created_by = created_by or assigned_to
        self.status = TaskStatus.PENDING
        self.created_at = datetime.utcnow()
        self.updated_at = datetime.utcnow()
        self.completed_at = None
        self.result = None
        self.error = None
|
|
||||||
|
|
||||||
class TaskManager:
    """Coordinates task lifecycle (creation, lookup, status updates) for agents."""

    def __init__(self):
        # task_id -> Task mapping; dicts preserve insertion order.
        self.tasks = {}
        # Reserved for archived tasks; not written to by any current method.
        self.task_history = []

    def create_task(
        self,
        title: str,
        description: str,
        assigned_to: str,
        priority: TaskPriority = TaskPriority.MEDIUM,
        created_by: Optional[str] = None
    ) -> Task:
        """Create a new task with a server-generated UUID and register it."""
        new_id = str(uuid.uuid4())
        new_task = Task(
            task_id=new_id,
            title=title,
            description=description,
            assigned_to=assigned_to,
            priority=priority,
            created_by=created_by,
        )
        self.tasks[new_id] = new_task
        return new_task

    def get_task(self, task_id: str) -> Optional[Task]:
        """Return the task with the given ID, or None when unknown."""
        return self.tasks.get(task_id)

    def update_task_status(
        self,
        task_id: str,
        status: TaskStatus,
        result: Optional[Dict[str, Any]] = None,
        error: Optional[str] = None
    ) -> bool:
        """Transition a task to `status`; returns False when the task is unknown.

        COMPLETED additionally records `result` and the completion time;
        FAILED additionally records `error`.
        """
        target = self.get_task(task_id)
        if target is None:
            return False

        target.status = status
        target.updated_at = datetime.utcnow()

        if status is TaskStatus.COMPLETED:
            target.completed_at = datetime.utcnow()
            target.result = result
        elif status is TaskStatus.FAILED:
            target.error = error

        return True

    def get_tasks_by_agent(self, agent_id: str) -> List[Task]:
        """All tasks currently assigned to `agent_id`."""
        return [t for t in self.tasks.values() if t.assigned_to == agent_id]

    def get_tasks_by_status(self, status: TaskStatus) -> List[Task]:
        """All tasks currently in the given status."""
        return [t for t in self.tasks.values() if t.status == status]

    def get_overdue_tasks(self, hours: int = 24) -> List[Task]:
        """Open (pending/in-progress) tasks created more than `hours` ago."""
        cutoff = datetime.utcnow() - timedelta(hours=hours)
        open_states = (TaskStatus.PENDING, TaskStatus.IN_PROGRESS)
        return [
            t for t in self.tasks.values()
            if t.status in open_states and t.created_at < cutoff
        ]
||||||
@@ -1,151 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Registry Service
|
|
||||||
Central agent discovery and registration system
|
|
||||||
"""
|
|
||||||
|
|
||||||
from fastapi import FastAPI, HTTPException, Depends
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from typing import List, Optional, Dict, Any
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
import sqlite3
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from contextlib import asynccontextmanager
|
|
||||||
|
|
||||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan handler: create the SQLite schema before serving."""
    # Startup
    init_db()
    yield
    # Shutdown (cleanup if needed)
    pass
|
||||||
app = FastAPI(title="AITBC Agent Registry API", version="1.0.0", lifespan=lifespan)
|
|
||||||
|
|
||||||
# Database setup
|
|
||||||
def get_db():
    """Open a connection to the registry database with dict-style row access."""
    connection = sqlite3.connect('agent_registry.db')
    # sqlite3.Row lets callers index rows by column name.
    connection.row_factory = sqlite3.Row
    return connection
||||||
|
|
||||||
@contextmanager
def get_db_connection():
    """Yield a registry DB connection that is always closed on exit."""
    connection = get_db()
    try:
        yield connection
    finally:
        connection.close()
||||||
|
|
||||||
# Initialize database
def init_db():
    """Create the `agents` table if it does not already exist (idempotent).

    status / last_heartbeat / created_at rely on column DEFAULTs; list and
    dict fields (capabilities, metadata) are stored as JSON text.
    """
    # NOTE(review): no explicit commit here — sqlite3 runs DDL outside the
    # implicit transaction in its legacy mode, but confirm for the runtime.
    with get_db_connection() as conn:
        conn.execute('''
            CREATE TABLE IF NOT EXISTS agents (
                id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                type TEXT NOT NULL,
                capabilities TEXT NOT NULL,
                chain_id TEXT NOT NULL,
                endpoint TEXT NOT NULL,
                status TEXT DEFAULT 'active',
                last_heartbeat TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                metadata TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''')
|
|
||||||
# Models
class Agent(BaseModel):
    """A registered agent as stored in and returned by the registry API."""
    id: str
    name: str
    type: str
    capabilities: List[str]
    chain_id: str
    endpoint: str
    # NOTE(review): mutable `{}` default — pydantic copies field defaults per
    # instance so the shared-dict pitfall should not apply; confirm for the
    # pydantic version in use.
    metadata: Optional[Dict[str, Any]] = {}
||||||
|
|
||||||
class AgentRegistration(BaseModel):
    """Request body for POST /api/agents/register (same as Agent, minus id)."""
    name: str
    type: str
    capabilities: List[str]
    chain_id: str
    endpoint: str
    # NOTE(review): mutable `{}` default — see Agent.metadata.
    metadata: Optional[Dict[str, Any]] = {}
||||||
|
|
||||||
# API Endpoints
|
|
||||||
|
|
||||||
@app.post("/api/agents/register", response_model=Agent)
async def register_agent(agent: AgentRegistration):
    """Register a new agent and return the stored record.

    The id is server-generated (UUID4); clients cannot choose their own.
    status / last_heartbeat / created_at take the table's DEFAULT values.
    """
    agent_id = str(uuid.uuid4())

    with get_db_connection() as conn:
        # Parameterized INSERT (no SQL injection); list/dict fields are
        # serialized to JSON text to match the TEXT columns.
        conn.execute('''
            INSERT INTO agents (id, name, type, capabilities, chain_id, endpoint, metadata)
            VALUES (?, ?, ?, ?, ?, ?, ?)
        ''', (
            agent_id, agent.name, agent.type,
            json.dumps(agent.capabilities), agent.chain_id,
            agent.endpoint, json.dumps(agent.metadata)
        ))
        conn.commit()

    # Echo the registered agent back to the caller.
    return Agent(
        id=agent_id,
        name=agent.name,
        type=agent.type,
        capabilities=agent.capabilities,
        chain_id=agent.chain_id,
        endpoint=agent.endpoint,
        metadata=agent.metadata
    )
|
|
||||||
@app.get("/api/agents", response_model=List[Agent])
async def list_agents(
    agent_type: Optional[str] = None,
    chain_id: Optional[str] = None,
    capability: Optional[str] = None
):
    """List active agents, optionally filtered by type, chain, or capability.

    Filters are ANDed together; only rows with status = 'active' are returned.
    """
    with get_db_connection() as conn:
        # Base query is fixed text; all user input goes through `?` placeholders.
        query = "SELECT * FROM agents WHERE status = 'active'"
        params = []

        if agent_type:
            query += " AND type = ?"
            params.append(agent_type)

        if chain_id:
            query += " AND chain_id = ?"
            params.append(chain_id)

        if capability:
            # Substring match against the JSON-encoded capabilities list.
            # NOTE(review): LIKE '%x%' over-matches (e.g. 'gen' matches
            # 'text_generation') — confirm this looseness is intended.
            query += " AND capabilities LIKE ?"
            params.append(f'%{capability}%')

        agents = conn.execute(query, params).fetchall()

        return [
            Agent(
                id=agent["id"],
                name=agent["name"],
                type=agent["type"],
                capabilities=json.loads(agent["capabilities"]),
                chain_id=agent["chain_id"],
                endpoint=agent["endpoint"],
                # metadata column is nullable; fall back to an empty object.
                metadata=json.loads(agent["metadata"] or "{}")
            )
            for agent in agents
        ]
|
|
||||||
@app.get("/api/health")
async def health_check():
    """Liveness probe; the datetime is serialized by FastAPI's JSON encoder."""
    return {"status": "ok", "timestamp": datetime.utcnow()}
|
||||||
|
|
||||||
if __name__ == "__main__":
    # Local import keeps uvicorn optional for code that only imports this module.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8013)
|
||||||
@@ -1,431 +0,0 @@
|
|||||||
"""
|
|
||||||
Agent Registration System
|
|
||||||
Handles AI agent registration, capability management, and discovery
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import time
|
|
||||||
import json
|
|
||||||
import hashlib
|
|
||||||
from typing import Dict, List, Optional, Set, Tuple
|
|
||||||
from dataclasses import dataclass, asdict
|
|
||||||
from enum import Enum
|
|
||||||
from decimal import Decimal
|
|
||||||
|
|
||||||
class AgentType(Enum):
    """Categories of agents the registry indexes (see AgentRegistry.type_index)."""
    AI_MODEL = "ai_model"
    DATA_PROVIDER = "data_provider"
    VALIDATOR = "validator"
    MARKET_MAKER = "market_maker"
    BROKER = "broker"
    ORACLE = "oracle"
|
||||||
class AgentStatus(Enum):
    """Agent lifecycle states; discovery only returns ACTIVE agents."""
    REGISTERED = "registered"  # initial state after register_agent
    ACTIVE = "active"          # eligible for discovery/search results
    INACTIVE = "inactive"      # candidate for cleanup_inactive_agents
    SUSPENDED = "suspended"
    BANNED = "banned"
|
||||||
class CapabilityType(Enum):
    """Capability categories used as keys of AgentRegistry.capability_index."""
    TEXT_GENERATION = "text_generation"
    IMAGE_GENERATION = "image_generation"
    DATA_ANALYSIS = "data_analysis"
    PREDICTION = "prediction"
    VALIDATION = "validation"
    COMPUTATION = "computation"
|
|
||||||
@dataclass
class AgentCapability:
    """One advertised capability of an agent, with pricing/QoS metadata."""
    capability_type: CapabilityType
    name: str
    version: str
    parameters: Dict              # free-form capability parameters
    performance_metrics: Dict     # free-form metrics supplied by the agent
    cost_per_use: Decimal
    availability: float           # presumably a 0.0-1.0 fraction (defaults to 1.0 in _create_capability_from_data) — confirm
    max_concurrent_jobs: int      # defaults to 1 in _create_capability_from_data
|
|
||||||
@dataclass
class AgentInfo:
    """Full registry record for one agent (see AgentRegistry.register_agent)."""
    agent_id: str                        # 16-hex-char id from _generate_agent_id
    agent_type: AgentType
    name: str
    owner_address: str                   # 0x-prefixed, 42 chars (validated on registration)
    public_key: str
    endpoint_url: str                    # http(s) URL (validated on registration)
    capabilities: List[AgentCapability]
    reputation_score: float              # starts at 1.0 on registration
    total_jobs_completed: int
    total_earnings: Decimal
    registration_time: float             # Unix timestamp (time.time())
    last_active: float                   # Unix timestamp; refreshed on updates
    status: AgentStatus
    metadata: Dict
||||||
|
|
||||||
class AgentRegistry:
    """Manages AI agent registration and discovery.

    Purely in-memory: agents live in dicts/sets with secondary indexes by
    capability and by agent type. No persistence is visible in this class.
    NOTE(review): several methods call log_info()/log_error(), which are not
    defined or imported anywhere in this file's visible scope — confirm they
    exist at runtime, otherwise those paths raise NameError.
    """

    def __init__(self):
        # agent_id -> AgentInfo
        self.agents: Dict[str, AgentInfo] = {}
        self.capability_index: Dict[CapabilityType, Set[str]] = {}  # capability -> agent_ids
        self.type_index: Dict[AgentType, Set[str]] = {}  # agent_type -> agent_ids
        # NOTE(review): reputation_scores and registration_queue are initialised
        # but never read or written by any method visible in this class.
        self.reputation_scores: Dict[str, float] = {}
        self.registration_queue: List[Dict] = []

        # Registry parameters
        self.min_reputation_threshold = 0.5  # NOTE(review): not enforced by any visible method
        self.max_agents_per_type = 1000
        self.registration_fee = Decimal('100.0')  # NOTE(review): reported in stats but never charged here
        self.inactivity_threshold = 86400 * 7  # 7 days

        # Initialize capability index
        for capability_type in CapabilityType:
            self.capability_index[capability_type] = set()

        # Initialize type index
        for agent_type in AgentType:
            self.type_index[agent_type] = set()

    async def register_agent(self, agent_type: AgentType, name: str, owner_address: str,
                             public_key: str, endpoint_url: str, capabilities: List[Dict],
                             metadata: Dict = None) -> Tuple[bool, str, Optional[str]]:
        """Register a new AI agent.

        Returns (ok, message, agent_id-or-None). Any exception is converted
        into a (False, "Registration failed: ...", None) result.
        """
        try:
            # Validate inputs
            if not self._validate_registration_inputs(agent_type, name, owner_address, public_key, endpoint_url):
                return False, "Invalid registration inputs", None

            # Check if agent already exists
            # NOTE(review): _generate_agent_id mixes in time.time(), so the same
            # owner/name produces a different id each call and this duplicate
            # check can essentially never trigger — confirm intent.
            agent_id = self._generate_agent_id(owner_address, name)
            if agent_id in self.agents:
                return False, "Agent already registered", None

            # Check type limits
            if len(self.type_index[agent_type]) >= self.max_agents_per_type:
                return False, f"Maximum agents of type {agent_type.value} reached", None

            # Convert capabilities (invalid entries are silently dropped)
            agent_capabilities = []
            for cap_data in capabilities:
                capability = self._create_capability_from_data(cap_data)
                if capability:
                    agent_capabilities.append(capability)

            if not agent_capabilities:
                return False, "Agent must have at least one valid capability", None

            # Create agent info
            agent_info = AgentInfo(
                agent_id=agent_id,
                agent_type=agent_type,
                name=name,
                owner_address=owner_address,
                public_key=public_key,
                endpoint_url=endpoint_url,
                capabilities=agent_capabilities,
                reputation_score=1.0,  # Start with neutral reputation
                total_jobs_completed=0,
                total_earnings=Decimal('0'),
                registration_time=time.time(),
                last_active=time.time(),
                status=AgentStatus.REGISTERED,
                metadata=metadata or {}
            )

            # Add to registry
            self.agents[agent_id] = agent_info

            # Update indexes
            self.type_index[agent_type].add(agent_id)
            for capability in agent_capabilities:
                self.capability_index[capability.capability_type].add(agent_id)

            log_info(f"Agent registered: {agent_id} ({name})")
            return True, "Registration successful", agent_id

        except Exception as e:
            return False, f"Registration failed: {str(e)}", None

    def _validate_registration_inputs(self, agent_type: AgentType, name: str,
                                      owner_address: str, public_key: str, endpoint_url: str) -> bool:
        """Validate registration inputs; returns True when all checks pass."""
        # Check required fields
        if not all([agent_type, name, owner_address, public_key, endpoint_url]):
            return False

        # Validate address format (simplified: 0x-prefixed, 42 chars total)
        if not owner_address.startswith('0x') or len(owner_address) != 42:
            return False

        # Validate URL format (simplified: scheme prefix only)
        if not endpoint_url.startswith(('http://', 'https://')):
            return False

        # Validate name length (3..100 characters)
        if len(name) < 3 or len(name) > 100:
            return False

        return True

    def _generate_agent_id(self, owner_address: str, name: str) -> str:
        """Generate unique agent ID (16 hex chars of a salted SHA-256)."""
        # The timestamp salt makes the id unique per call; see the note in
        # register_agent about the duplicate check this defeats.
        content = f"{owner_address}:{name}:{time.time()}"
        return hashlib.sha256(content.encode()).hexdigest()[:16]

    def _create_capability_from_data(self, cap_data: Dict) -> Optional[AgentCapability]:
        """Create capability from data dictionary; returns None on any invalid input."""
        try:
            # Validate required fields
            required_fields = ['type', 'name', 'version', 'cost_per_use']
            if not all(field in cap_data for field in required_fields):
                return None

            # Parse capability type (must be a valid CapabilityType value)
            try:
                capability_type = CapabilityType(cap_data['type'])
            except ValueError:
                return None

            # Create capability; optional fields get conservative defaults.
            return AgentCapability(
                capability_type=capability_type,
                name=cap_data['name'],
                version=cap_data['version'],
                parameters=cap_data.get('parameters', {}),
                performance_metrics=cap_data.get('performance_metrics', {}),
                cost_per_use=Decimal(str(cap_data['cost_per_use'])),
                availability=cap_data.get('availability', 1.0),
                max_concurrent_jobs=cap_data.get('max_concurrent_jobs', 1)
            )

        except Exception as e:
            # NOTE(review): log_error is not defined in this file's visible scope.
            log_error(f"Error creating capability: {e}")
            return None

    async def update_agent_status(self, agent_id: str, status: AgentStatus) -> Tuple[bool, str]:
        """Update agent status and refresh its last_active timestamp."""
        if agent_id not in self.agents:
            return False, "Agent not found"

        agent = self.agents[agent_id]
        old_status = agent.status
        agent.status = status
        agent.last_active = time.time()

        log_info(f"Agent {agent_id} status changed: {old_status.value} -> {status.value}")
        return True, "Status updated successfully"

    async def update_agent_capabilities(self, agent_id: str, capabilities: List[Dict]) -> Tuple[bool, str]:
        """Replace an agent's capabilities and re-index them."""
        if agent_id not in self.agents:
            return False, "Agent not found"

        agent = self.agents[agent_id]

        # Remove old capabilities from index
        for old_capability in agent.capabilities:
            self.capability_index[old_capability.capability_type].discard(agent_id)

        # Add new capabilities
        new_capabilities = []
        for cap_data in capabilities:
            capability = self._create_capability_from_data(cap_data)
            if capability:
                new_capabilities.append(capability)
                self.capability_index[capability.capability_type].add(agent_id)

        if not new_capabilities:
            # NOTE(review): the old entries were already removed from
            # capability_index above while agent.capabilities still holds them,
            # so this failure path leaves the index inconsistent — confirm.
            return False, "No valid capabilities provided"

        agent.capabilities = new_capabilities
        agent.last_active = time.time()

        return True, "Capabilities updated successfully"

    async def find_agents_by_capability(self, capability_type: CapabilityType,
                                        filters: Dict = None) -> List[AgentInfo]:
        """Find ACTIVE agents offering a capability, sorted by reputation desc."""
        agent_ids = self.capability_index.get(capability_type, set())

        agents = []
        for agent_id in agent_ids:
            agent = self.agents.get(agent_id)
            if agent and agent.status == AgentStatus.ACTIVE:
                if self._matches_filters(agent, filters):
                    agents.append(agent)

        # Sort by reputation (highest first)
        agents.sort(key=lambda x: x.reputation_score, reverse=True)
        return agents

    async def find_agents_by_type(self, agent_type: AgentType, filters: Dict = None) -> List[AgentInfo]:
        """Find ACTIVE agents of a type, sorted by reputation desc.

        NOTE(review): near-duplicate of find_agents_by_capability — a shared
        private helper would remove the duplication.
        """
        agent_ids = self.type_index.get(agent_type, set())

        agents = []
        for agent_id in agent_ids:
            agent = self.agents.get(agent_id)
            if agent and agent.status == AgentStatus.ACTIVE:
                if self._matches_filters(agent, filters):
                    agents.append(agent)

        # Sort by reputation (highest first)
        agents.sort(key=lambda x: x.reputation_score, reverse=True)
        return agents

    def _matches_filters(self, agent: AgentInfo, filters: Dict) -> bool:
        """Check if agent matches filters; a None/empty filter dict matches all."""
        if not filters:
            return True

        # Reputation filter
        if 'min_reputation' in filters:
            if agent.reputation_score < filters['min_reputation']:
                return False

        # Cost filter: rejects the agent if ANY capability exceeds the cap.
        if 'max_cost_per_use' in filters:
            max_cost = Decimal(str(filters['max_cost_per_use']))
            if any(cap.cost_per_use > max_cost for cap in agent.capabilities):
                return False

        # Availability filter: rejects if ANY capability is below the floor.
        if 'min_availability' in filters:
            min_availability = filters['min_availability']
            if any(cap.availability < min_availability for cap in agent.capabilities):
                return False

        # Location filter (matches against metadata['location'], exact equality)
        if 'location' in filters:
            agent_location = agent.metadata.get('location')
            if agent_location != filters['location']:
                return False

        return True

    async def get_agent_info(self, agent_id: str) -> Optional[AgentInfo]:
        """Get agent information (None when unknown)."""
        return self.agents.get(agent_id)

    async def search_agents(self, query: str, limit: int = 50) -> List[AgentInfo]:
        """Case-insensitive substring search over names and capabilities.

        Only ACTIVE agents are considered; results are reputation-sorted and
        truncated to `limit`.
        """
        query_lower = query.lower()
        results = []

        for agent in self.agents.values():
            if agent.status != AgentStatus.ACTIVE:
                continue

            # Search in name
            if query_lower in agent.name.lower():
                results.append(agent)
                continue

            # Search in capabilities (name or capability-type value)
            for capability in agent.capabilities:
                if (query_lower in capability.name.lower() or
                        query_lower in capability.capability_type.value):
                    results.append(agent)
                    break

        # Sort by relevance (reputation)
        results.sort(key=lambda x: x.reputation_score, reverse=True)
        return results[:limit]

    async def get_agent_statistics(self, agent_id: str) -> Optional[Dict]:
        """Get detailed statistics for an agent (None when unknown)."""
        agent = self.agents.get(agent_id)
        if not agent:
            return None

        # Calculate additional statistics; guard both divisions against zero.
        avg_job_earnings = agent.total_earnings / agent.total_jobs_completed if agent.total_jobs_completed > 0 else Decimal('0')
        days_active = (time.time() - agent.registration_time) / 86400
        jobs_per_day = agent.total_jobs_completed / days_active if days_active > 0 else 0

        return {
            'agent_id': agent_id,
            'name': agent.name,
            'type': agent.agent_type.value,
            'status': agent.status.value,
            'reputation_score': agent.reputation_score,
            'total_jobs_completed': agent.total_jobs_completed,
            'total_earnings': float(agent.total_earnings),
            'avg_job_earnings': float(avg_job_earnings),
            'jobs_per_day': jobs_per_day,
            'days_active': int(days_active),
            'capabilities_count': len(agent.capabilities),
            'last_active': agent.last_active,
            'registration_time': agent.registration_time
        }

    async def get_registry_statistics(self) -> Dict:
        """Get registry-wide statistics (counts, averages, totals)."""
        total_agents = len(self.agents)
        active_agents = len([a for a in self.agents.values() if a.status == AgentStatus.ACTIVE])

        # Count by type
        type_counts = {}
        for agent_type in AgentType:
            type_counts[agent_type.value] = len(self.type_index[agent_type])

        # Count by capability
        capability_counts = {}
        for capability_type in CapabilityType:
            capability_counts[capability_type.value] = len(self.capability_index[capability_type])

        # Reputation statistics
        reputations = [a.reputation_score for a in self.agents.values()]
        avg_reputation = sum(reputations) / len(reputations) if reputations else 0

        # Earnings statistics
        total_earnings = sum(a.total_earnings for a in self.agents.values())

        return {
            'total_agents': total_agents,
            'active_agents': active_agents,
            'inactive_agents': total_agents - active_agents,
            'agent_types': type_counts,
            'capabilities': capability_counts,
            'average_reputation': avg_reputation,
            'total_earnings': float(total_earnings),
            'registration_fee': float(self.registration_fee)
        }

    async def cleanup_inactive_agents(self) -> Tuple[int, str]:
        """Remove agents that have been INACTIVE past the inactivity threshold."""
        current_time = time.time()
        cleaned_count = 0

        # Iterate a snapshot since we delete from self.agents while looping.
        for agent_id, agent in list(self.agents.items()):
            if (agent.status == AgentStatus.INACTIVE and
                    current_time - agent.last_active > self.inactivity_threshold):

                # Remove from registry
                del self.agents[agent_id]

                # Update indexes
                self.type_index[agent.agent_type].discard(agent_id)
                for capability in agent.capabilities:
                    self.capability_index[capability.capability_type].discard(agent_id)

                cleaned_count += 1

        if cleaned_count > 0:
            log_info(f"Cleaned up {cleaned_count} inactive agents")

        return cleaned_count, f"Cleaned up {cleaned_count} inactive agents"
|
||||||
|
|
||||||
# Global agent registry (module-level singleton; None until created)
agent_registry: Optional[AgentRegistry] = None


def get_agent_registry() -> Optional[AgentRegistry]:
    """Get global agent registry (None before create_agent_registry is called)."""
    return agent_registry


def create_agent_registry() -> AgentRegistry:
    """Create and set global agent registry.

    Note: unconditionally replaces any existing instance.
    """
    global agent_registry
    agent_registry = AgentRegistry()
    return agent_registry
||||||
@@ -1,166 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Trading Agent
|
|
||||||
Automated trading agent for AITBC marketplace
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
from typing import Dict, Any, List
|
|
||||||
from datetime import datetime
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
# Add parent directory to path
|
|
||||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
|
||||||
|
|
||||||
from apps.agent_services.agent_bridge.src.integration_layer import AgentServiceBridge
|
|
||||||
|
|
||||||
class TradingAgent:
    """Automated trading agent.

    Registers itself with the AgentServiceBridge, then repeatedly analyses
    the configured symbols and submits buy/sell tasks through the bridge.

    Config keys (all optional): strategy ("basic"), symbols (["AITBC/BTC"]),
    trade_interval (60 s), trade_amount (0.1).
    """

    def __init__(self, agent_id: str, config: Dict[str, Any]):
        self.agent_id = agent_id
        self.config = config
        self.bridge = AgentServiceBridge()
        self.is_running = False
        self.trading_strategy = config.get("strategy", "basic")
        self.symbols = config.get("symbols", ["AITBC/BTC"])
        self.trade_interval = config.get("trade_interval", 60)  # seconds

    async def start(self) -> bool:
        """Register with the service bridge; returns True on success."""
        try:
            # Register with service bridge
            success = await self.bridge.start_agent(self.agent_id, {
                "type": "trading",
                "capabilities": ["market_analysis", "trading", "risk_management"],
                # Fixed: plain literal (was a pointless f-string with no placeholders).
                "endpoint": "http://localhost:8005"
            })

            if success:
                self.is_running = True
                print(f"Trading agent {self.agent_id} started successfully")
                return True
            else:
                print(f"Failed to start trading agent {self.agent_id}")
                return False
        except Exception as e:
            # Best-effort startup: report and signal failure instead of raising.
            print(f"Error starting trading agent: {e}")
            return False

    async def stop(self) -> bool:
        """Stop the trading loop and deregister from the bridge."""
        self.is_running = False
        success = await self.bridge.stop_agent(self.agent_id)
        if success:
            print(f"Trading agent {self.agent_id} stopped successfully")
        return success

    async def run_trading_loop(self):
        """Main trading loop: analyse each symbol, then sleep trade_interval.

        Errors are logged and retried after a short back-off so one bad
        iteration does not kill the loop.
        """
        while self.is_running:
            try:
                for symbol in self.symbols:
                    await self._analyze_and_trade(symbol)

                await asyncio.sleep(self.trade_interval)
            except Exception as e:
                print(f"Error in trading loop: {e}")
                await asyncio.sleep(10)  # Wait before retrying

    async def _analyze_and_trade(self, symbol: str) -> None:
        """Run market analysis for `symbol` and trade when recommended."""
        try:
            # Perform market analysis via the bridge.
            analysis_task = {
                "type": "market_analysis",
                "symbol": symbol,
                "strategy": self.trading_strategy
            }

            analysis_result = await self.bridge.execute_agent_task(self.agent_id, analysis_task)

            if analysis_result.get("status") == "success":
                analysis = analysis_result["result"]["analysis"]

                # Make trading decision
                if self._should_trade(analysis):
                    await self._execute_trade(symbol, analysis)
            else:
                print(f"Market analysis failed for {symbol}: {analysis_result}")

        except Exception as e:
            print(f"Error in analyze_and_trade for {symbol}: {e}")

    def _should_trade(self, analysis: Dict[str, Any]) -> bool:
        """True when the analysis recommends an actionable side (buy/sell)."""
        recommendation = analysis.get("recommendation", "hold")
        return recommendation in ["buy", "sell"]

    async def _execute_trade(self, symbol: str, analysis: Dict[str, Any]) -> None:
        """Submit a trade task for the recommended side.

        Fixed: the buy and sell branches previously built identical dicts
        differing only in "side"; the side now comes straight from the
        recommendation, with any other value ignored (same behavior).
        """
        try:
            recommendation = analysis.get("recommendation", "hold")
            if recommendation not in ("buy", "sell"):
                return

            trade_task = {
                "type": "trading",
                "symbol": symbol,
                "side": recommendation,
                "amount": self.config.get("trade_amount", 0.1),
                "strategy": self.trading_strategy
            }

            trade_result = await self.bridge.execute_agent_task(self.agent_id, trade_task)

            if trade_result.get("status") == "success":
                print(f"Trade executed successfully: {trade_result}")
            else:
                print(f"Trade execution failed: {trade_result}")

        except Exception as e:
            print(f"Error executing trade: {e}")

    async def get_status(self) -> Dict[str, Any]:
        """Fetch this agent's status from the bridge."""
        return await self.bridge.get_agent_status(self.agent_id)
||||||
|
|
||||||
# Main execution
async def main():
    """Run a demo trading agent with a fixed local configuration."""
    agent_id = "trading-agent-001"
    config = {
        "strategy": "basic",
        "symbols": ["AITBC/BTC"],
        "trade_interval": 30,
        "trade_amount": 0.1
    }

    agent = TradingAgent(agent_id, config)

    # Start agent
    if await agent.start():
        try:
            # Run trading loop until interrupted.
            # NOTE(review): under asyncio.run, Ctrl-C typically surfaces as
            # KeyboardInterrupt outside this coroutine — confirm this handler
            # is actually reached in practice.
            await agent.run_trading_loop()
        except KeyboardInterrupt:
            print("Shutting down trading agent...")
        finally:
            # Always deregister, even on error.
            await agent.stop()
    else:
        print("Failed to start trading agent")
||||||
|
|
||||||
if __name__ == "__main__":
    # Script entry point: drive the async main() on a fresh event loop.
    asyncio.run(main())
|
||||||
@@ -1,229 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Integration Layer
|
|
||||||
Connects agent protocols to existing AITBC services
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import aiohttp
|
|
||||||
import json
|
|
||||||
from typing import Dict, Any, List, Optional
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
class AITBCServiceIntegration:
|
|
||||||
"""Integration layer for AITBC services"""
|
|
||||||
|
|
||||||
    def __init__(self):
        """Set up endpoint table; the HTTP session is created in __aenter__."""
        # NOTE(review): ports are hard-coded for a local dev deployment —
        # consider making these configurable.
        self.service_endpoints = {
            "coordinator_api": "http://localhost:8000",
            "blockchain_rpc": "http://localhost:8006",
            "exchange_service": "http://localhost:8001",
            "marketplace": "http://localhost:8002",
            "agent_registry": "http://localhost:8013"
        }
        # Shared aiohttp session; None until entered as an async context manager.
        self.session = None
|
||||||
|
|
||||||
    async def __aenter__(self):
        """Open the shared aiohttp session when entering `async with`."""
        self.session = aiohttp.ClientSession()
        return self
|
||||||
|
|
||||||
    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Close the shared session on exit; exceptions are not suppressed."""
        if self.session:
            await self.session.close()
|
||||||
|
|
||||||
    async def get_blockchain_info(self) -> Dict[str, Any]:
        """Probe the blockchain RPC /health endpoint.

        Best-effort: any failure (connection error, bad JSON, session not
        opened) is returned as {"error": ..., "status": "unavailable"}
        rather than raised.
        """
        try:
            async with self.session.get(f"{self.service_endpoints['blockchain_rpc']}/health") as response:
                return await response.json()
        except Exception as e:
            return {"error": str(e), "status": "unavailable"}
|
||||||
|
|
||||||
async def get_exchange_status(self) -> Dict[str, Any]:
|
|
||||||
"""Get exchange service status"""
|
|
||||||
try:
|
|
||||||
async with self.session.get(f"{self.service_endpoints['exchange_service']}/api/health") as response:
|
|
||||||
return await response.json()
|
|
||||||
except Exception as e:
|
|
||||||
return {"error": str(e), "status": "unavailable"}
|
|
||||||
|
|
||||||
async def get_coordinator_status(self) -> Dict[str, Any]:
|
|
||||||
"""Get coordinator API status"""
|
|
||||||
try:
|
|
||||||
async with self.session.get(f"{self.service_endpoints['coordinator_api']}/health") as response:
|
|
||||||
return await response.json()
|
|
||||||
except Exception as e:
|
|
||||||
return {"error": str(e), "status": "unavailable"}
|
|
||||||
|
|
||||||
async def submit_transaction(self, transaction_data: Dict[str, Any]) -> Dict[str, Any]:
|
|
||||||
"""Submit transaction to blockchain"""
|
|
||||||
try:
|
|
||||||
async with self.session.post(
|
|
||||||
f"{self.service_endpoints['blockchain_rpc']}/rpc/submit",
|
|
||||||
json=transaction_data
|
|
||||||
) as response:
|
|
||||||
return await response.json()
|
|
||||||
except Exception as e:
|
|
||||||
return {"error": str(e), "status": "failed"}
|
|
||||||
|
|
||||||
async def get_market_data(self, symbol: str = "AITBC/BTC") -> Dict[str, Any]:
|
|
||||||
"""Get market data from exchange"""
|
|
||||||
try:
|
|
||||||
async with self.session.get(f"{self.service_endpoints['exchange_service']}/api/market/{symbol}") as response:
|
|
||||||
return await response.json()
|
|
||||||
except Exception as e:
|
|
||||||
return {"error": str(e), "status": "failed"}
|
|
||||||
|
|
||||||
async def register_agent_with_coordinator(self, agent_data: Dict[str, Any]) -> Dict[str, Any]:
|
|
||||||
"""Register agent with coordinator"""
|
|
||||||
try:
|
|
||||||
async with self.session.post(
|
|
||||||
f"{self.service_endpoints['agent_registry']}/api/agents/register",
|
|
||||||
json=agent_data
|
|
||||||
) as response:
|
|
||||||
return await response.json()
|
|
||||||
except Exception as e:
|
|
||||||
return {"error": str(e), "status": "failed"}
|
|
||||||
|
|
||||||
class AgentServiceBridge:
    """Bridge between agents and AITBC services.

    Keeps a registry of locally-started agents in ``active_agents`` and
    routes their tasks to the backing services via AITBCServiceIntegration.
    """

    def __init__(self):
        self.integration = AITBCServiceIntegration()
        # agent_id -> {"config", "registration", "started_at"}
        self.active_agents = {}

    async def start_agent(self, agent_id: str, agent_config: Dict[str, Any]) -> bool:
        """Register *agent_id* with the coordinator and track it locally.

        Returns True on successful registration, False otherwise.
        """
        try:
            async with self.integration as integration:
                registration = await integration.register_agent_with_coordinator({
                    "name": agent_id,
                    "type": agent_config.get("type", "generic"),
                    "capabilities": agent_config.get("capabilities", []),
                    "chain_id": agent_config.get("chain_id", "ait-mainnet"),
                    # Default port grows with the number of live agents.
                    "endpoint": agent_config.get(
                        "endpoint",
                        f"http://localhost:{8000 + len(self.active_agents) + 10}",
                    ),
                })

                # The registry returns the created agent dict on success,
                # not a {"status": "ok"} wrapper.
                if not registration or "id" not in registration:
                    print(f"Registration failed: {registration}")
                    return False

                self.active_agents[agent_id] = {
                    "config": agent_config,
                    "registration": registration,
                    "started_at": datetime.utcnow(),
                }
                return True
        except Exception as e:
            print(f"Failed to start agent {agent_id}: {e}")
            return False

    async def stop_agent(self, agent_id: str) -> bool:
        """Forget a tracked agent; True when it was known."""
        return self.active_agents.pop(agent_id, None) is not None

    async def get_agent_status(self, agent_id: str) -> Dict[str, Any]:
        """Report an agent's status plus the health of the backing services."""
        info = self.active_agents.get(agent_id)
        if info is None:
            return {"status": "not_found"}

        async with self.integration as integration:
            # Dict literals evaluate in order, so the service probes run in
            # the same sequence as before: blockchain, exchange, coordinator.
            services = {
                "blockchain": await integration.get_blockchain_info(),
                "exchange": await integration.get_exchange_status(),
                "coordinator": await integration.get_coordinator_status(),
            }
            return {
                "agent_id": agent_id,
                "status": "active",
                "started_at": info["started_at"].isoformat(),
                "services": services,
            }

    async def execute_agent_task(self, agent_id: str, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Dispatch a task to its handler for a known agent."""
        if agent_id not in self.active_agents:
            return {"status": "error", "message": "Agent not found"}

        task_type = task_data.get("type")
        handlers = {
            "market_analysis": self._execute_market_analysis,
            "trading": self._execute_trading_task,
            "compliance_check": self._execute_compliance_check,
        }
        handler = handlers.get(task_type)
        if handler is None:
            return {"status": "error", "message": f"Unknown task type: {task_type}"}
        return await handler(task_data)

    async def _execute_market_analysis(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Fetch market data and return a placeholder analysis."""
        try:
            symbol = task_data.get("symbol", "AITBC/BTC")
            async with self.integration as integration:
                market_data = await integration.get_market_data(symbol)

                analysis_result = {
                    "symbol": symbol,
                    "market_data": market_data,
                    # Static stub analysis — no real signal model yet.
                    "analysis": {
                        "trend": "neutral",
                        "volatility": "medium",
                        "recommendation": "hold",
                    },
                    "timestamp": datetime.utcnow().isoformat(),
                }
                return {"status": "success", "result": analysis_result}
        except Exception as e:
            return {"status": "error", "message": str(e)}

    async def _execute_trading_task(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Build a trade transaction from market data and submit it."""
        try:
            symbol = task_data.get("symbol", "AITBC/BTC")
            async with self.integration as integration:
                market_data = await integration.get_market_data(symbol)

                transaction = {
                    "type": "trade",
                    "symbol": symbol,
                    "side": task_data.get("side", "buy"),
                    "amount": task_data.get("amount", 0.1),
                    # Explicit price wins; otherwise the quoted market price.
                    "price": task_data.get("price", market_data.get("price", 0.001)),
                }

                tx_result = await integration.submit_transaction(transaction)
                return {"status": "success", "transaction": tx_result}
        except Exception as e:
            return {"status": "error", "message": str(e)}

    async def _execute_compliance_check(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
        """Run a stubbed KYC/AML/sanctions check that always passes."""
        try:
            compliance_result = {
                "user_id": task_data.get("user_id"),
                "check_type": task_data.get("check_type", "basic"),
                "status": "passed",
                "checks_performed": ["kyc", "aml", "sanctions"],
                "timestamp": datetime.utcnow().isoformat(),
            }
            return {"status": "success", "result": compliance_result}
        except Exception as e:
            return {"status": "error", "message": str(e)}
@@ -1,149 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Compliance Agent
|
|
||||||
Automated compliance and regulatory monitoring agent
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
from typing import Dict, Any, List
|
|
||||||
from datetime import datetime
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
# Add parent directory to path
|
|
||||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
|
||||||
|
|
||||||
from apps.agent_services.agent_bridge.src.integration_layer import AgentServiceBridge
|
|
||||||
|
|
||||||
class ComplianceAgent:
    """Automated compliance agent.

    Periodically runs a full compliance check for each monitored entity
    through the AgentServiceBridge and emits (printed) alerts on failures.
    """

    def __init__(self, agent_id: str, config: Dict[str, Any]):
        self.agent_id = agent_id
        self.config = config
        self.bridge = AgentServiceBridge()
        self.is_running = False
        # Seconds between sweeps over the monitored entities (default 5 min).
        self.check_interval = config.get("check_interval", 300)
        self.monitored_entities = config.get("monitored_entities", [])

    async def start(self) -> bool:
        """Register with the bridge; True when registration succeeds."""
        try:
            registered = await self.bridge.start_agent(self.agent_id, {
                "type": "compliance",
                "capabilities": ["kyc_check", "aml_screening", "regulatory_reporting"],
                "endpoint": "http://localhost:8006",
            })
            if not registered:
                print(f"Failed to start compliance agent {self.agent_id}")
                return False
            self.is_running = True
            print(f"Compliance agent {self.agent_id} started successfully")
            return True
        except Exception as e:
            print(f"Error starting compliance agent: {e}")
            return False

    async def stop(self) -> bool:
        """Halt the loop and deregister from the bridge."""
        self.is_running = False
        stopped = await self.bridge.stop_agent(self.agent_id)
        if stopped:
            print(f"Compliance agent {self.agent_id} stopped successfully")
        return stopped

    async def run_compliance_loop(self):
        """Sweep all monitored entities, then sleep, until stopped."""
        while self.is_running:
            try:
                for entity_id in self.monitored_entities:
                    await self._perform_compliance_check(entity_id)
                await asyncio.sleep(self.check_interval)
            except Exception as e:
                print(f"Error in compliance loop: {e}")
                await asyncio.sleep(30)  # back off before retrying

    async def _perform_compliance_check(self, entity_id: str) -> None:
        """Run one full compliance check for *entity_id* via the bridge."""
        try:
            outcome = await self.bridge.execute_agent_task(self.agent_id, {
                "type": "compliance_check",
                "user_id": entity_id,
                "check_type": "full",
                "monitored_activities": ["trading", "transfers", "wallet_creation"],
            })
            if outcome.get("status") == "success":
                await self._handle_compliance_result(entity_id, outcome["result"])
            else:
                print(f"Compliance check failed for {entity_id}: {outcome}")
        except Exception as e:
            print(f"Error performing compliance check for {entity_id}: {e}")

    async def _handle_compliance_result(self, entity_id: str, result: Dict[str, Any]) -> None:
        """React to a check outcome: log it, and alert on failure."""
        status = result.get("status", "unknown")
        if status == "passed":
            print(f"✅ Compliance check passed for {entity_id}")
            return
        if status == "failed":
            print(f"❌ Compliance check failed for {entity_id}")
            # Escalate for further investigation.
            await self._trigger_compliance_alert(entity_id, result)
            return
        print(f"⚠️ Compliance check inconclusive for {entity_id}")

    async def _trigger_compliance_alert(self, entity_id: str, result: Dict[str, Any]) -> None:
        """Emit a high-severity alert (stdout stand-in for a real alert bus)."""
        alert_data = {
            "entity_id": entity_id,
            "alert_type": "compliance_failure",
            "severity": "high",
            "details": result,
            "timestamp": datetime.utcnow().isoformat(),
        }
        # In a real implementation, this would send to alert system.
        print(f"🚨 COMPLIANCE ALERT: {json.dumps(alert_data, indent=2)}")

    async def get_status(self) -> Dict[str, Any]:
        """Bridge status augmented with this agent's loop configuration."""
        status = await self.bridge.get_agent_status(self.agent_id)
        status["monitored_entities"] = len(self.monitored_entities)
        status["check_interval"] = self.check_interval
        return status
# Main execution
|
|
||||||
async def main():
    """Run a demo compliance agent until interrupted."""
    agent = ComplianceAgent(
        "compliance-agent-001",
        {
            "check_interval": 60,  # 1 minute for testing
            "monitored_entities": ["user001", "user002", "user003"],
        },
    )

    if not await agent.start():
        print("Failed to start compliance agent")
        return
    try:
        await agent.run_compliance_loop()
    except KeyboardInterrupt:
        print("Shutting down compliance agent...")
    finally:
        await agent.stop()


if __name__ == "__main__":
    asyncio.run(main())
|
|
||||||
@@ -1,132 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Coordinator Service
|
|
||||||
Agent task coordination and management
|
|
||||||
"""
|
|
||||||
|
|
||||||
from fastapi import FastAPI, HTTPException
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from typing import List, Optional, Dict, Any
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
import sqlite3
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from contextlib import asynccontextmanager
|
|
||||||
|
|
||||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: create the SQLite schema before serving."""
    init_db()  # startup
    yield
    # No shutdown cleanup required.
|
|
||||||
app = FastAPI(title="AITBC Agent Coordinator API", version="1.0.0", lifespan=lifespan)
|
|
||||||
|
|
||||||
# Database setup
|
|
||||||
def get_db():
    """Open a connection to the coordinator's SQLite database.

    Rows come back as sqlite3.Row so columns are addressable by name.
    """
    connection = sqlite3.connect('agent_coordinator.db')
    connection.row_factory = sqlite3.Row
    return connection
|
|
||||||
@contextmanager
def get_db_connection():
    """Yield a DB connection that is closed no matter how the block exits."""
    connection = get_db()
    try:
        yield connection
    finally:
        connection.close()
|
|
||||||
# Initialize database
|
|
||||||
def init_db():
    """Create the tasks table if it does not already exist (idempotent).

    NOTE(review): relies on sqlite3 auto-committing DDL statements —
    confirm no explicit commit is needed on the targeted Python version.
    """
    with get_db_connection() as conn:
        conn.execute('''
            CREATE TABLE IF NOT EXISTS tasks (
                id TEXT PRIMARY KEY,
                task_type TEXT NOT NULL,
                payload TEXT NOT NULL,
                required_capabilities TEXT NOT NULL,
                priority TEXT NOT NULL,
                status TEXT NOT NULL,
                assigned_agent_id TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                result TEXT
            )
        ''')
|
|
||||||
# Models
|
|
||||||
class Task(BaseModel):
    """A coordinator task as stored in and returned from the API.

    ``payload`` and ``required_capabilities`` are persisted JSON-encoded
    in the tasks table and decoded back into these fields.
    """

    id: str
    task_type: str
    payload: Dict[str, Any]
    required_capabilities: List[str]
    priority: str
    status: str
    # Populated once a worker agent picks the task up.
    assigned_agent_id: Optional[str] = None
|
|
||||||
class TaskCreation(BaseModel):
    """Request body for POST /api/tasks — a Task minus server-set fields."""

    task_type: str
    payload: Dict[str, Any]
    required_capabilities: List[str]
    priority: str = "normal"
|
|
||||||
# API Endpoints
|
|
||||||
|
|
||||||
@app.post("/api/tasks", response_model=Task)
|
|
||||||
async def create_task(task: TaskCreation):
|
|
||||||
"""Create a new task"""
|
|
||||||
task_id = str(uuid.uuid4())
|
|
||||||
|
|
||||||
with get_db_connection() as conn:
|
|
||||||
conn.execute('''
|
|
||||||
INSERT INTO tasks (id, task_type, payload, required_capabilities, priority, status)
|
|
||||||
VALUES (?, ?, ?, ?, ?, ?)
|
|
||||||
''', (
|
|
||||||
task_id, task.task_type, json.dumps(task.payload),
|
|
||||||
json.dumps(task.required_capabilities), task.priority, "pending"
|
|
||||||
))
|
|
||||||
|
|
||||||
return Task(
|
|
||||||
id=task_id,
|
|
||||||
task_type=task.task_type,
|
|
||||||
payload=task.payload,
|
|
||||||
required_capabilities=task.required_capabilities,
|
|
||||||
priority=task.priority,
|
|
||||||
status="pending"
|
|
||||||
)
|
|
||||||
|
|
||||||
@app.get("/api/tasks", response_model=List[Task])
|
|
||||||
async def list_tasks(status: Optional[str] = None):
|
|
||||||
"""List tasks with optional status filter"""
|
|
||||||
with get_db_connection() as conn:
|
|
||||||
query = "SELECT * FROM tasks"
|
|
||||||
params = []
|
|
||||||
|
|
||||||
if status:
|
|
||||||
query += " WHERE status = ?"
|
|
||||||
params.append(status)
|
|
||||||
|
|
||||||
tasks = conn.execute(query, params).fetchall()
|
|
||||||
|
|
||||||
return [
|
|
||||||
Task(
|
|
||||||
id=task["id"],
|
|
||||||
task_type=task["task_type"],
|
|
||||||
payload=json.loads(task["payload"]),
|
|
||||||
required_capabilities=json.loads(task["required_capabilities"]),
|
|
||||||
priority=task["priority"],
|
|
||||||
status=task["status"],
|
|
||||||
assigned_agent_id=task["assigned_agent_id"]
|
|
||||||
)
|
|
||||||
for task in tasks
|
|
||||||
]
|
|
||||||
|
|
||||||
@app.get("/api/health")
|
|
||||||
async def health_check():
|
|
||||||
"""Health check endpoint"""
|
|
||||||
return {"status": "ok", "timestamp": datetime.utcnow()}
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    # Run the coordinator as a standalone service.
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8012)
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
# AITBC Agent Protocols Environment Configuration
|
|
||||||
# Copy this file to .env and update with your secure values
|
|
||||||
|
|
||||||
# Agent Protocol Encryption Key (generate a strong, unique key)
|
|
||||||
AITBC_AGENT_PROTOCOL_KEY=your-secure-encryption-key-here
|
|
||||||
|
|
||||||
# Agent Protocol Salt (generate a unique salt value)
|
|
||||||
AITBC_AGENT_PROTOCOL_SALT=your-unique-salt-value-here
|
|
||||||
|
|
||||||
# Agent Registry Configuration
|
|
||||||
AGENT_REGISTRY_HOST=0.0.0.0
|
|
||||||
AGENT_REGISTRY_PORT=8003
|
|
||||||
|
|
||||||
# Database Configuration
|
|
||||||
AGENT_REGISTRY_DB_PATH=agent_registry.db
|
|
||||||
|
|
||||||
# Security Settings
|
|
||||||
AGENT_PROTOCOL_TIMEOUT=300
|
|
||||||
AGENT_PROTOCOL_MAX_RETRIES=3
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
"""
|
|
||||||
Agent Protocols Package
|
|
||||||
"""
|
|
||||||
|
|
||||||
from .message_protocol import MessageProtocol, MessageTypes, AgentMessageClient
|
|
||||||
from .task_manager import TaskManager, TaskStatus, TaskPriority, Task
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"MessageProtocol",
|
|
||||||
"MessageTypes",
|
|
||||||
"AgentMessageClient",
|
|
||||||
"TaskManager",
|
|
||||||
"TaskStatus",
|
|
||||||
"TaskPriority",
|
|
||||||
"Task"
|
|
||||||
]
|
|
||||||
@@ -1,113 +0,0 @@
|
|||||||
"""
|
|
||||||
Message Protocol for AITBC Agents
|
|
||||||
Handles message creation, routing, and delivery between agents
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Dict, Any, Optional, List
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
class MessageTypes(Enum):
    """Kinds of messages agents exchange over the message protocol."""

    TASK_REQUEST = "task_request"    # ask another agent to do work
    TASK_RESPONSE = "task_response"  # reply carrying a task's outcome
    HEARTBEAT = "heartbeat"          # liveness ping
    STATUS_UPDATE = "status_update"  # unsolicited state-change notice
    ERROR = "error"                  # failure report
    DATA = "data"                    # generic payload transfer
|
||||||
class MessageProtocol:
    """In-memory message store and lifecycle tracker for agent traffic.

    Messages are plain dict envelopes whose ``status`` moves through
    pending -> sent -> received (or failed).
    """

    def __init__(self):
        self.messages = []          # every envelope ever created, in order
        self.message_handlers = {}  # reserved for per-type callbacks (unused here)

    def create_message(
        self,
        sender_id: str,
        receiver_id: str,
        message_type: MessageTypes,
        content: Dict[str, Any],
        message_id: Optional[str] = None
    ) -> Dict[str, Any]:
        """Create, record, and return a new 'pending' message envelope."""
        if message_id is None:
            message_id = str(uuid.uuid4())

        envelope = {
            "message_id": message_id,
            "sender_id": sender_id,
            "receiver_id": receiver_id,
            "message_type": message_type.value,
            "content": content,
            "timestamp": datetime.utcnow().isoformat(),
            "status": "pending",
        }
        self.messages.append(envelope)
        return envelope

    def send_message(self, message: Dict[str, Any]) -> bool:
        """Mark *message* as sent; False (and status 'failed') on error."""
        try:
            message["status"] = "sent"
            message["sent_timestamp"] = datetime.utcnow().isoformat()
        except Exception:
            message["status"] = "failed"
            return False
        return True

    def receive_message(self, message_id: str) -> Optional[Dict[str, Any]]:
        """Mark the message with *message_id* as received and return it."""
        match = next(
            (m for m in self.messages if m["message_id"] == message_id), None
        )
        if match is not None:
            match["status"] = "received"
            match["received_timestamp"] = datetime.utcnow().isoformat()
        return match

    def get_messages_by_agent(self, agent_id: str) -> List[Dict[str, Any]]:
        """All messages in which *agent_id* is sender or receiver."""
        return [
            m for m in self.messages
            if agent_id in (m["sender_id"], m["receiver_id"])
        ]
|
|
||||||
class AgentMessageClient:
    """Per-agent facade over a shared MessageProtocol instance."""

    def __init__(self, agent_id: str, protocol: MessageProtocol):
        self.agent_id = agent_id
        self.protocol = protocol
        # Envelopes already delivered to this client (avoids re-delivery).
        self.received_messages = []

    def send_message(
        self,
        receiver_id: str,
        message_type: MessageTypes,
        content: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Create and dispatch a message to *receiver_id*; returns it."""
        envelope = self.protocol.create_message(
            sender_id=self.agent_id,
            receiver_id=receiver_id,
            message_type=message_type,
            content=content,
        )
        self.protocol.send_message(envelope)
        return envelope

    def receive_messages(self) -> List[Dict[str, Any]]:
        """Collect all 'sent' messages addressed here and not yet seen."""
        fresh = []
        for envelope in self.protocol.messages:
            if envelope["receiver_id"] != self.agent_id:
                continue
            if envelope["status"] != "sent":
                continue
            if envelope in self.received_messages:
                continue
            self.protocol.receive_message(envelope["message_id"])
            self.received_messages.append(envelope)
            fresh.append(envelope)
        return fresh
@@ -1,128 +0,0 @@
|
|||||||
"""
|
|
||||||
Task Manager for AITBC Agents
|
|
||||||
Handles task creation, assignment, and tracking
|
|
||||||
"""
|
|
||||||
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
from typing import Dict, Any, Optional, List
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
class TaskStatus(Enum):
    """Lifecycle states a task can be in."""

    PENDING = "pending"          # created, not yet started
    IN_PROGRESS = "in_progress"  # an agent is working on it
    COMPLETED = "completed"      # finished with a result
    FAILED = "failed"            # finished with an error
    CANCELLED = "cancelled"      # abandoned before completion
|
|
||||||
class TaskPriority(Enum):
    """Relative urgency of a task."""

    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    URGENT = "urgent"
|
|
||||||
class Task:
    """Mutable record of one unit of work assigned to an agent."""

    def __init__(
        self,
        task_id: str,
        title: str,
        description: str,
        assigned_to: str,
        priority: TaskPriority = TaskPriority.MEDIUM,
        created_by: Optional[str] = None
    ):
        self.task_id = task_id
        self.title = title
        self.description = description
        self.assigned_to = assigned_to
        self.priority = priority
        # Creator defaults to the assignee when not supplied.
        self.created_by = created_by or assigned_to
        self.status = TaskStatus.PENDING
        self.created_at = datetime.utcnow()
        self.updated_at = datetime.utcnow()
        self.completed_at = None  # set when status becomes COMPLETED
        self.result = None        # success payload
        self.error = None         # failure description
|
||||||
class TaskManager:
    """In-memory task registry for agent coordination."""

    def __init__(self):
        self.tasks = {}         # task_id -> Task
        self.task_history = []  # reserved; not yet populated anywhere

    def create_task(
        self,
        title: str,
        description: str,
        assigned_to: str,
        priority: TaskPriority = TaskPriority.MEDIUM,
        created_by: Optional[str] = None
    ) -> Task:
        """Create, store, and return a new PENDING task with a fresh UUID."""
        new_id = str(uuid.uuid4())
        new_task = Task(
            task_id=new_id,
            title=title,
            description=description,
            assigned_to=assigned_to,
            priority=priority,
            created_by=created_by,
        )
        self.tasks[new_id] = new_task
        return new_task

    def get_task(self, task_id: str) -> Optional[Task]:
        """Look up a task by ID (None when absent)."""
        return self.tasks.get(task_id)

    def update_task_status(
        self,
        task_id: str,
        status: TaskStatus,
        result: Optional[Dict[str, Any]] = None,
        error: Optional[str] = None
    ) -> bool:
        """Transition a task's status; False when the task is unknown.

        COMPLETED records the result and completion time; FAILED records
        the error message.
        """
        task = self.tasks.get(task_id)
        if task is None:
            return False

        task.status = status
        task.updated_at = datetime.utcnow()

        if status is TaskStatus.COMPLETED:
            task.completed_at = datetime.utcnow()
            task.result = result
        elif status is TaskStatus.FAILED:
            task.error = error
        return True

    def get_tasks_by_agent(self, agent_id: str) -> List[Task]:
        """All tasks currently assigned to *agent_id*."""
        return [t for t in self.tasks.values() if t.assigned_to == agent_id]

    def get_tasks_by_status(self, status: TaskStatus) -> List[Task]:
        """All tasks whose status equals *status*."""
        return [t for t in self.tasks.values() if t.status == status]

    def get_overdue_tasks(self, hours: int = 24) -> List[Task]:
        """Unfinished tasks created more than *hours* hours ago."""
        cutoff = datetime.utcnow() - timedelta(hours=hours)
        open_states = (TaskStatus.PENDING, TaskStatus.IN_PROGRESS)
        return [
            t for t in self.tasks.values()
            if t.status in open_states and t.created_at < cutoff
        ]
@@ -1,151 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Registry Service
|
|
||||||
Central agent discovery and registration system
|
|
||||||
"""
|
|
||||||
|
|
||||||
from fastapi import FastAPI, HTTPException, Depends
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from typing import List, Optional, Dict, Any
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
import sqlite3
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from contextlib import asynccontextmanager
|
|
||||||
|
|
||||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: create the SQLite schema before serving."""
    init_db()  # startup
    yield
    # No shutdown cleanup required.
|
|
||||||
app = FastAPI(title="AITBC Agent Registry API", version="1.0.0", lifespan=lifespan)
|
|
||||||
|
|
||||||
# Database setup
|
|
||||||
def get_db():
    """Open a connection to the agent-registry SQLite database.

    Rows come back as sqlite3.Row so columns are addressable by name.
    """
    connection = sqlite3.connect('agent_registry.db')
    connection.row_factory = sqlite3.Row
    return connection
|
|
||||||
@contextmanager
def get_db_connection():
    """Yield a DB connection that is closed no matter how the block exits."""
    connection = get_db()
    try:
        yield connection
    finally:
        connection.close()
|
|
||||||
# Initialize database
|
|
||||||
def init_db():
    """Create the agents table if it does not already exist (idempotent).

    NOTE(review): relies on sqlite3 auto-committing DDL statements —
    confirm no explicit commit is needed on the targeted Python version.
    """
    with get_db_connection() as conn:
        conn.execute('''
            CREATE TABLE IF NOT EXISTS agents (
                id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                type TEXT NOT NULL,
                capabilities TEXT NOT NULL,
                chain_id TEXT NOT NULL,
                endpoint TEXT NOT NULL,
                status TEXT DEFAULT 'active',
                last_heartbeat TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                metadata TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''')
|
||||||
# Models
|
|
||||||
class Agent(BaseModel):
    """A registered agent as stored in and returned from the registry API."""

    id: str
    name: str
    type: str
    capabilities: List[str]
    chain_id: str
    endpoint: str
    # NOTE(review): mutable default — pydantic copies field defaults per
    # instance (unlike plain Python defaults), so this is safe; verify.
    metadata: Optional[Dict[str, Any]] = {}
|
|
||||||
class AgentRegistration(BaseModel):
    """Request body for agent registration — an Agent minus the server id."""

    name: str
    type: str
    capabilities: List[str]
    chain_id: str
    endpoint: str
    # NOTE(review): mutable default — pydantic copies field defaults per
    # instance (unlike plain Python defaults), so this is safe; verify.
    metadata: Optional[Dict[str, Any]] = {}
|
|
||||||
# API Endpoints
|
|
||||||
|
|
||||||
@app.post("/api/agents/register", response_model=Agent)
|
|
||||||
async def register_agent(agent: AgentRegistration):
|
|
||||||
"""Register a new agent"""
|
|
||||||
agent_id = str(uuid.uuid4())
|
|
||||||
|
|
||||||
with get_db_connection() as conn:
|
|
||||||
conn.execute('''
|
|
||||||
INSERT INTO agents (id, name, type, capabilities, chain_id, endpoint, metadata)
|
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?)
|
|
||||||
''', (
|
|
||||||
agent_id, agent.name, agent.type,
|
|
||||||
json.dumps(agent.capabilities), agent.chain_id,
|
|
||||||
agent.endpoint, json.dumps(agent.metadata)
|
|
||||||
))
|
|
||||||
conn.commit()
|
|
||||||
|
|
||||||
return Agent(
|
|
||||||
id=agent_id,
|
|
||||||
name=agent.name,
|
|
||||||
type=agent.type,
|
|
||||||
capabilities=agent.capabilities,
|
|
||||||
chain_id=agent.chain_id,
|
|
||||||
endpoint=agent.endpoint,
|
|
||||||
metadata=agent.metadata
|
|
||||||
)
|
|
||||||
|
|
||||||
@app.get("/api/agents", response_model=List[Agent])
async def list_agents(
    agent_type: Optional[str] = None,
    chain_id: Optional[str] = None,
    capability: Optional[str] = None
):
    """List registered agents with optional filters"""
    conditions = []
    params = []

    if agent_type:
        conditions.append(" AND type = ?")
        params.append(agent_type)
    if chain_id:
        conditions.append(" AND chain_id = ?")
        params.append(chain_id)
    if capability:
        # Substring match against the JSON-encoded capabilities column.
        conditions.append(" AND capabilities LIKE ?")
        params.append(f'%{capability}%')

    query = "SELECT * FROM agents WHERE status = 'active'" + "".join(conditions)

    with get_db_connection() as conn:
        rows = conn.execute(query, params).fetchall()

    return [
        Agent(
            id=row["id"],
            name=row["name"],
            type=row["type"],
            capabilities=json.loads(row["capabilities"]),
            chain_id=row["chain_id"],
            endpoint=row["endpoint"],
            # metadata column is nullable; treat NULL as an empty dict
            metadata=json.loads(row["metadata"] or "{}"),
        )
        for row in rows
    ]
|
|
||||||
|
|
||||||
@app.get("/api/health")
async def health_check():
    """Health check endpoint.

    Returns a liveness payload with the current UTC time. Uses an aware
    UTC datetime: ``datetime.utcnow()`` is deprecated (Python 3.12+) and
    returns a naive timestamp that is easy to misinterpret downstream.
    """
    # Function-scope import: the module header (not fully visible here) is
    # only known to provide `datetime` itself.
    from datetime import timezone
    return {"status": "ok", "timestamp": datetime.now(timezone.utc)}
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    # Run the registry as a standalone service; binds all interfaces on
    # port 8013 (the same port other modules use for "agent_registry").
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8013)
|
|
||||||
@@ -1,431 +0,0 @@
|
|||||||
"""
|
|
||||||
Agent Registration System
|
|
||||||
Handles AI agent registration, capability management, and discovery
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import time
|
|
||||||
import json
|
|
||||||
import hashlib
|
|
||||||
from typing import Dict, List, Optional, Set, Tuple
|
|
||||||
from dataclasses import dataclass, asdict
|
|
||||||
from enum import Enum
|
|
||||||
from decimal import Decimal
|
|
||||||
|
|
||||||
class AgentType(Enum):
    """Closed set of agent roles the registry tracks (one index bucket each)."""
    AI_MODEL = "ai_model"
    DATA_PROVIDER = "data_provider"
    VALIDATOR = "validator"
    MARKET_MAKER = "market_maker"
    BROKER = "broker"
    ORACLE = "oracle"
|
|
||||||
|
|
||||||
class AgentStatus(Enum):
    """Lifecycle states of a registered agent.

    Discovery (find_*/search) only returns ACTIVE agents; cleanup removes
    long-INACTIVE ones.
    """
    REGISTERED = "registered"
    ACTIVE = "active"
    INACTIVE = "inactive"
    SUSPENDED = "suspended"
    BANNED = "banned"
|
|
||||||
|
|
||||||
class CapabilityType(Enum):
    """Closed set of capability categories used as capability-index keys."""
    TEXT_GENERATION = "text_generation"
    IMAGE_GENERATION = "image_generation"
    DATA_ANALYSIS = "data_analysis"
    PREDICTION = "prediction"
    VALIDATION = "validation"
    COMPUTATION = "computation"
|
|
||||||
|
|
||||||
@dataclass
class AgentCapability:
    """One capability an agent offers, with pricing and availability data."""
    capability_type: CapabilityType
    name: str
    version: str
    parameters: Dict  # capability-specific configuration (free-form)
    performance_metrics: Dict  # free-form metrics supplied at registration
    cost_per_use: Decimal
    availability: float  # assumed fraction in [0, 1] -- TODO confirm with callers
    max_concurrent_jobs: int
|
|
||||||
|
|
||||||
@dataclass
class AgentInfo:
    """Full registry record for one agent (the value stored in AgentRegistry.agents)."""
    agent_id: str  # 16-hex-char sha256 prefix derived from owner/name
    agent_type: AgentType
    name: str
    owner_address: str  # expected 0x-prefixed, 42 chars (see input validation)
    public_key: str
    endpoint_url: str  # must start with http:// or https://
    capabilities: List[AgentCapability]
    reputation_score: float
    total_jobs_completed: int
    total_earnings: Decimal
    registration_time: float  # Unix timestamp (time.time())
    last_active: float  # Unix timestamp; drives inactivity cleanup
    status: AgentStatus
    metadata: Dict  # free-form; may carry e.g. 'location' used by filters
|
|
||||||
|
|
||||||
class AgentRegistry:
    """Manages AI agent registration and discovery.

    Keeps an in-memory primary store (``agents``) plus two secondary
    indexes (by capability type and by agent type). Every mutating method
    must keep the indexes in sync with the primary store.

    Fixes vs. previous revision:
    - ``_generate_agent_id`` no longer mixes a timestamp into the hash, so
      the "Agent already registered" duplicate check actually works.
    - ``update_agent_capabilities`` validates the new list *before*
      touching the indexes, so a failed update cannot corrupt them.
    """

    def __init__(self):
        # Primary store: agent_id -> AgentInfo
        self.agents: Dict[str, AgentInfo] = {}
        self.capability_index: Dict[CapabilityType, Set[str]] = {}  # capability -> agent_ids
        self.type_index: Dict[AgentType, Set[str]] = {}  # agent_type -> agent_ids
        self.reputation_scores: Dict[str, float] = {}
        self.registration_queue: List[Dict] = []

        # Registry parameters
        self.min_reputation_threshold = 0.5
        self.max_agents_per_type = 1000
        self.registration_fee = Decimal('100.0')
        self.inactivity_threshold = 86400 * 7  # 7 days

        # Pre-create every index bucket so lookups never KeyError.
        for capability_type in CapabilityType:
            self.capability_index[capability_type] = set()
        for agent_type in AgentType:
            self.type_index[agent_type] = set()

    async def register_agent(self, agent_type: AgentType, name: str, owner_address: str,
                             public_key: str, endpoint_url: str, capabilities: List[Dict],
                             metadata: Dict = None) -> Tuple[bool, str, Optional[str]]:
        """Register a new AI agent.

        Returns ``(ok, message, agent_id)``; ``agent_id`` is None on failure.
        """
        try:
            # Validate inputs
            if not self._validate_registration_inputs(agent_type, name, owner_address, public_key, endpoint_url):
                return False, "Invalid registration inputs", None

            # Deterministic ID from (owner, name): re-registering the same
            # pair yields the same ID, so this check detects duplicates.
            agent_id = self._generate_agent_id(owner_address, name)
            if agent_id in self.agents:
                return False, "Agent already registered", None

            # Enforce per-type population cap.
            if len(self.type_index[agent_type]) >= self.max_agents_per_type:
                return False, f"Maximum agents of type {agent_type.value} reached", None

            # Convert capabilities; malformed entries are silently dropped
            # (matching _create_capability_from_data's best-effort contract).
            agent_capabilities = []
            for cap_data in capabilities:
                capability = self._create_capability_from_data(cap_data)
                if capability:
                    agent_capabilities.append(capability)

            if not agent_capabilities:
                return False, "Agent must have at least one valid capability", None

            agent_info = AgentInfo(
                agent_id=agent_id,
                agent_type=agent_type,
                name=name,
                owner_address=owner_address,
                public_key=public_key,
                endpoint_url=endpoint_url,
                capabilities=agent_capabilities,
                reputation_score=1.0,  # Start with neutral reputation
                total_jobs_completed=0,
                total_earnings=Decimal('0'),
                registration_time=time.time(),
                last_active=time.time(),
                status=AgentStatus.REGISTERED,
                metadata=metadata or {}
            )

            # Add to registry and keep both secondary indexes in sync.
            self.agents[agent_id] = agent_info
            self.type_index[agent_type].add(agent_id)
            for capability in agent_capabilities:
                self.capability_index[capability.capability_type].add(agent_id)

            log_info(f"Agent registered: {agent_id} ({name})")
            return True, "Registration successful", agent_id

        except Exception as e:
            return False, f"Registration failed: {str(e)}", None

    def _validate_registration_inputs(self, agent_type: AgentType, name: str,
                                      owner_address: str, public_key: str, endpoint_url: str) -> bool:
        """Validate registration inputs; True iff all fields pass the cheap checks."""
        # All fields must be present and non-empty.
        if not all([agent_type, name, owner_address, public_key, endpoint_url]):
            return False

        # Validate address format (simplified: 0x + 40 hex-ish chars by length only).
        if not owner_address.startswith('0x') or len(owner_address) != 42:
            return False

        # Validate URL format (simplified scheme check).
        if not endpoint_url.startswith(('http://', 'https://')):
            return False

        # Name length bounds.
        if len(name) < 3 or len(name) > 100:
            return False

        return True

    def _generate_agent_id(self, owner_address: str, name: str) -> str:
        """Generate a deterministic agent ID from (owner, name).

        The ID deliberately does NOT include a timestamp: it must be stable
        for a given owner/name pair so register_agent's duplicate check can
        detect re-registration. (The previous timestamped version produced a
        fresh ID on every call, making that check dead code.)
        """
        content = f"{owner_address}:{name}"
        return hashlib.sha256(content.encode()).hexdigest()[:16]

    def _create_capability_from_data(self, cap_data: Dict) -> Optional[AgentCapability]:
        """Create an AgentCapability from a dict; None if the data is invalid."""
        try:
            # Required fields must all be present.
            required_fields = ['type', 'name', 'version', 'cost_per_use']
            if not all(field in cap_data for field in required_fields):
                return None

            # Parse capability type; unknown values are rejected, not raised.
            try:
                capability_type = CapabilityType(cap_data['type'])
            except ValueError:
                return None

            return AgentCapability(
                capability_type=capability_type,
                name=cap_data['name'],
                version=cap_data['version'],
                parameters=cap_data.get('parameters', {}),
                performance_metrics=cap_data.get('performance_metrics', {}),
                # Decimal(str(...)) avoids binary-float artifacts from raw floats.
                cost_per_use=Decimal(str(cap_data['cost_per_use'])),
                availability=cap_data.get('availability', 1.0),
                max_concurrent_jobs=cap_data.get('max_concurrent_jobs', 1)
            )

        except Exception as e:
            log_error(f"Error creating capability: {e}")
            return None

    async def update_agent_status(self, agent_id: str, status: AgentStatus) -> Tuple[bool, str]:
        """Update an agent's status and refresh its last_active timestamp."""
        if agent_id not in self.agents:
            return False, "Agent not found"

        agent = self.agents[agent_id]
        old_status = agent.status
        agent.status = status
        agent.last_active = time.time()

        log_info(f"Agent {agent_id} status changed: {old_status.value} -> {status.value}")
        return True, "Status updated successfully"

    async def update_agent_capabilities(self, agent_id: str, capabilities: List[Dict]) -> Tuple[bool, str]:
        """Replace an agent's capabilities.

        Validates the new capability list *before* mutating anything, so a
        failed update leaves both the agent record and the capability index
        untouched (the previous version de-indexed old capabilities first,
        corrupting the index when validation failed).
        """
        if agent_id not in self.agents:
            return False, "Agent not found"

        agent = self.agents[agent_id]

        # Parse/validate first; abort without mutating on failure.
        new_capabilities = []
        for cap_data in capabilities:
            capability = self._create_capability_from_data(cap_data)
            if capability:
                new_capabilities.append(capability)

        if not new_capabilities:
            return False, "No valid capabilities provided"

        # Swap index entries only after validation succeeded.
        for old_capability in agent.capabilities:
            self.capability_index[old_capability.capability_type].discard(agent_id)
        for capability in new_capabilities:
            self.capability_index[capability.capability_type].add(agent_id)

        agent.capabilities = new_capabilities
        agent.last_active = time.time()

        return True, "Capabilities updated successfully"

    async def find_agents_by_capability(self, capability_type: CapabilityType,
                                        filters: Dict = None) -> List[AgentInfo]:
        """Find ACTIVE agents offering *capability_type*, best reputation first."""
        return self._active_agents_sorted(self.capability_index.get(capability_type, set()), filters)

    async def find_agents_by_type(self, agent_type: AgentType, filters: Dict = None) -> List[AgentInfo]:
        """Find ACTIVE agents of *agent_type*, best reputation first."""
        return self._active_agents_sorted(self.type_index.get(agent_type, set()), filters)

    def _active_agents_sorted(self, agent_ids: Set[str], filters: Dict) -> List[AgentInfo]:
        """Resolve ids to ACTIVE agents passing *filters*, sorted by reputation desc.

        Shared backend for both find_* methods (their bodies were identical).
        """
        agents = [
            agent
            for agent in (self.agents.get(agent_id) for agent_id in agent_ids)
            if agent and agent.status == AgentStatus.ACTIVE and self._matches_filters(agent, filters)
        ]
        agents.sort(key=lambda x: x.reputation_score, reverse=True)
        return agents

    def _matches_filters(self, agent: AgentInfo, filters: Dict) -> bool:
        """True iff *agent* satisfies every provided filter (empty/None passes)."""
        if not filters:
            return True

        # Reputation floor.
        if 'min_reputation' in filters:
            if agent.reputation_score < filters['min_reputation']:
                return False

        # Cost ceiling: EVERY capability must be affordable.
        if 'max_cost_per_use' in filters:
            max_cost = Decimal(str(filters['max_cost_per_use']))
            if any(cap.cost_per_use > max_cost for cap in agent.capabilities):
                return False

        # Availability floor: EVERY capability must meet it.
        if 'min_availability' in filters:
            min_availability = filters['min_availability']
            if any(cap.availability < min_availability for cap in agent.capabilities):
                return False

        # Exact-match location filter against agent metadata (if present).
        if 'location' in filters:
            agent_location = agent.metadata.get('location')
            if agent_location != filters['location']:
                return False

        return True

    async def get_agent_info(self, agent_id: str) -> Optional[AgentInfo]:
        """Get agent information (None if unknown)."""
        return self.agents.get(agent_id)

    async def search_agents(self, query: str, limit: int = 50) -> List[AgentInfo]:
        """Case-insensitive substring search over ACTIVE agents' names and capabilities."""
        query_lower = query.lower()
        results = []

        for agent in self.agents.values():
            if agent.status != AgentStatus.ACTIVE:
                continue

            # Name match wins outright; skip capability scan.
            if query_lower in agent.name.lower():
                results.append(agent)
                continue

            # Otherwise match against capability names and type values.
            for capability in agent.capabilities:
                if (query_lower in capability.name.lower() or
                        query_lower in capability.capability_type.value):
                    results.append(agent)
                    break

        # Sort by relevance proxy (reputation) and truncate.
        results.sort(key=lambda x: x.reputation_score, reverse=True)
        return results[:limit]

    async def get_agent_statistics(self, agent_id: str) -> Optional[Dict]:
        """Get detailed statistics for an agent (None if unknown)."""
        agent = self.agents.get(agent_id)
        if not agent:
            return None

        # Derived metrics; guard against division by zero on fresh agents.
        avg_job_earnings = agent.total_earnings / agent.total_jobs_completed if agent.total_jobs_completed > 0 else Decimal('0')
        days_active = (time.time() - agent.registration_time) / 86400
        jobs_per_day = agent.total_jobs_completed / days_active if days_active > 0 else 0

        return {
            'agent_id': agent_id,
            'name': agent.name,
            'type': agent.agent_type.value,
            'status': agent.status.value,
            'reputation_score': agent.reputation_score,
            'total_jobs_completed': agent.total_jobs_completed,
            'total_earnings': float(agent.total_earnings),
            'avg_job_earnings': float(avg_job_earnings),
            'jobs_per_day': jobs_per_day,
            'days_active': int(days_active),
            'capabilities_count': len(agent.capabilities),
            'last_active': agent.last_active,
            'registration_time': agent.registration_time
        }

    async def get_registry_statistics(self) -> Dict:
        """Get registry-wide statistics (counts, reputation, earnings)."""
        total_agents = len(self.agents)
        active_agents = len([a for a in self.agents.values() if a.status == AgentStatus.ACTIVE])

        # Population per agent type (from the type index).
        type_counts = {}
        for agent_type in AgentType:
            type_counts[agent_type.value] = len(self.type_index[agent_type])

        # Population per capability (from the capability index).
        capability_counts = {}
        for capability_type in CapabilityType:
            capability_counts[capability_type.value] = len(self.capability_index[capability_type])

        # Reputation statistics.
        reputations = [a.reputation_score for a in self.agents.values()]
        avg_reputation = sum(reputations) / len(reputations) if reputations else 0

        # Earnings statistics.
        total_earnings = sum(a.total_earnings for a in self.agents.values())

        return {
            'total_agents': total_agents,
            'active_agents': active_agents,
            'inactive_agents': total_agents - active_agents,
            'agent_types': type_counts,
            'capabilities': capability_counts,
            'average_reputation': avg_reputation,
            'total_earnings': float(total_earnings),
            'registration_fee': float(self.registration_fee)
        }

    async def cleanup_inactive_agents(self) -> Tuple[int, str]:
        """Remove agents INACTIVE for longer than the inactivity threshold."""
        current_time = time.time()
        cleaned_count = 0

        # Iterate a snapshot so we can delete while iterating.
        for agent_id, agent in list(self.agents.items()):
            if (agent.status == AgentStatus.INACTIVE and
                    current_time - agent.last_active > self.inactivity_threshold):

                del self.agents[agent_id]

                # Keep both secondary indexes consistent with the removal.
                self.type_index[agent.agent_type].discard(agent_id)
                for capability in agent.capabilities:
                    self.capability_index[capability.capability_type].discard(agent_id)

                cleaned_count += 1

        if cleaned_count > 0:
            log_info(f"Cleaned up {cleaned_count} inactive agents")

        return cleaned_count, f"Cleaned up {cleaned_count} inactive agents"
|
|
||||||
|
|
||||||
# Global agent registry singleton; None until create_agent_registry() runs.
agent_registry: Optional[AgentRegistry] = None
|
|
||||||
|
|
||||||
def get_agent_registry() -> Optional[AgentRegistry]:
    """Get the global agent registry (None if create_agent_registry was never called)."""
    return agent_registry
|
|
||||||
|
|
||||||
def create_agent_registry() -> AgentRegistry:
    """Create a fresh AgentRegistry, install it as the global singleton, and return it.

    Note: unconditionally replaces any existing global registry.
    """
    global agent_registry
    agent_registry = AgentRegistry()
    return agent_registry
|
|
||||||
@@ -1,166 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Trading Agent
|
|
||||||
Automated trading agent for AITBC marketplace
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
from typing import Dict, Any, List
|
|
||||||
from datetime import datetime
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
# Add parent directory to path
|
|
||||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
|
||||||
|
|
||||||
from apps.agent_services.agent_bridge.src.integration_layer import AgentServiceBridge
|
|
||||||
|
|
||||||
class TradingAgent:
    """Automated trading agent.

    Registers itself through an AgentServiceBridge, then periodically runs
    market analysis per configured symbol and submits a trade task whenever
    the analysis recommends "buy" or "sell".
    """

    def __init__(self, agent_id: str, config: Dict[str, Any]):
        self.agent_id = agent_id
        self.config = config
        self.bridge = AgentServiceBridge()
        self.is_running = False
        self.trading_strategy = config.get("strategy", "basic")
        self.symbols = config.get("symbols", ["AITBC/BTC"])
        self.trade_interval = config.get("trade_interval", 60)  # seconds between passes

    async def start(self) -> bool:
        """Register this agent with the service bridge; True on success."""
        try:
            # Register with service bridge
            success = await self.bridge.start_agent(self.agent_id, {
                "type": "trading",
                "capabilities": ["market_analysis", "trading", "risk_management"],
                # plain string: previous f-string had no placeholders
                "endpoint": "http://localhost:8005"
            })

            if success:
                self.is_running = True
                print(f"Trading agent {self.agent_id} started successfully")
                return True
            print(f"Failed to start trading agent {self.agent_id}")
            return False
        except Exception as e:
            print(f"Error starting trading agent: {e}")
            return False

    async def stop(self) -> bool:
        """Stop the trading loop and deregister from the bridge."""
        self.is_running = False
        success = await self.bridge.stop_agent(self.agent_id)
        if success:
            print(f"Trading agent {self.agent_id} stopped successfully")
        return success

    async def run_trading_loop(self):
        """Main loop: analyze/trade every symbol, sleep, repeat until stopped."""
        while self.is_running:
            try:
                for symbol in self.symbols:
                    await self._analyze_and_trade(symbol)

                await asyncio.sleep(self.trade_interval)
            except Exception as e:
                # Best-effort loop: log and back off rather than die.
                print(f"Error in trading loop: {e}")
                await asyncio.sleep(10)  # Wait before retrying

    async def _analyze_and_trade(self, symbol: str) -> None:
        """Run a market-analysis task for *symbol* and trade if warranted."""
        try:
            analysis_task = {
                "type": "market_analysis",
                "symbol": symbol,
                "strategy": self.trading_strategy
            }

            analysis_result = await self.bridge.execute_agent_task(self.agent_id, analysis_task)

            if analysis_result.get("status") == "success":
                analysis = analysis_result["result"]["analysis"]
                if self._should_trade(analysis):
                    await self._execute_trade(symbol, analysis)
            else:
                print(f"Market analysis failed for {symbol}: {analysis_result}")

        except Exception as e:
            print(f"Error in analyze_and_trade for {symbol}: {e}")

    def _should_trade(self, analysis: Dict[str, Any]) -> bool:
        """True when the analysis recommends an actionable side (buy/sell)."""
        return analysis.get("recommendation", "hold") in ["buy", "sell"]

    async def _execute_trade(self, symbol: str, analysis: Dict[str, Any]) -> None:
        """Submit a trade task for the recommended side.

        The buy and sell payloads were previously duplicated verbatim; they
        differ only in ``side``, so the task dict is now built once.
        """
        try:
            side = analysis.get("recommendation", "hold")
            if side not in ("buy", "sell"):
                return

            trade_task = {
                "type": "trading",
                "symbol": symbol,
                "side": side,
                "amount": self.config.get("trade_amount", 0.1),
                "strategy": self.trading_strategy
            }

            trade_result = await self.bridge.execute_agent_task(self.agent_id, trade_task)

            if trade_result.get("status") == "success":
                print(f"Trade executed successfully: {trade_result}")
            else:
                print(f"Trade execution failed: {trade_result}")

        except Exception as e:
            print(f"Error executing trade: {e}")

    async def get_status(self) -> Dict[str, Any]:
        """Proxy to the bridge's status report for this agent."""
        return await self.bridge.get_agent_status(self.agent_id)
|
|
||||||
|
|
||||||
# Main execution
|
|
||||||
async def main():
    """Entry point: configure, start, and run the trading agent until interrupted."""
    settings = {
        "strategy": "basic",
        "symbols": ["AITBC/BTC"],
        "trade_interval": 30,
        "trade_amount": 0.1
    }

    trader = TradingAgent("trading-agent-001", settings)

    # Guard clause: bail out early if registration fails.
    if not await trader.start():
        print("Failed to start trading agent")
        return

    try:
        # Run trading loop until interrupted.
        await trader.run_trading_loop()
    except KeyboardInterrupt:
        print("Shutting down trading agent...")
    finally:
        await trader.stop()
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    # Launch the agent's asyncio event loop when run as a script.
    asyncio.run(main())
|
|
||||||
@@ -1,229 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Integration Layer
|
|
||||||
Connects agent protocols to existing AITBC services
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import aiohttp
|
|
||||||
import json
|
|
||||||
from typing import Dict, Any, List, Optional
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
class AITBCServiceIntegration:
    """Integration layer for AITBC services.

    Thin async HTTP client over the local AITBC service topology. Must be
    used as an async context manager (``async with``) so ``self.session``
    exists; the public methods never raise — any failure is mapped to an
    error dict (``{"error": ..., "status": ...}``).

    The six request methods previously repeated the same
    try/request/except body; that is now factored into two private
    helpers, preserving each method's original error ``status`` string.
    """

    def __init__(self):
        # One base URL per local AITBC service.
        self.service_endpoints = {
            "coordinator_api": "http://localhost:8000",
            "blockchain_rpc": "http://localhost:8006",
            "exchange_service": "http://localhost:8001",
            "marketplace": "http://localhost:8002",
            "agent_registry": "http://localhost:8013"
        }
        self.session = None  # aiohttp.ClientSession; live only inside the context

    async def __aenter__(self):
        self.session = aiohttp.ClientSession()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        if self.session:
            await self.session.close()

    async def _get_json(self, url: str, error_status: str) -> Dict[str, Any]:
        """GET *url* and decode the JSON body; map any failure to an error dict."""
        try:
            async with self.session.get(url) as response:
                return await response.json()
        except Exception as e:
            return {"error": str(e), "status": error_status}

    async def _post_json(self, url: str, payload: Dict[str, Any]) -> Dict[str, Any]:
        """POST *payload* as JSON to *url*; map any failure to an error dict."""
        try:
            async with self.session.post(url, json=payload) as response:
                return await response.json()
        except Exception as e:
            return {"error": str(e), "status": "failed"}

    async def get_blockchain_info(self) -> Dict[str, Any]:
        """Get blockchain information (errors report status 'unavailable')."""
        return await self._get_json(f"{self.service_endpoints['blockchain_rpc']}/health", "unavailable")

    async def get_exchange_status(self) -> Dict[str, Any]:
        """Get exchange service status (errors report status 'unavailable')."""
        return await self._get_json(f"{self.service_endpoints['exchange_service']}/api/health", "unavailable")

    async def get_coordinator_status(self) -> Dict[str, Any]:
        """Get coordinator API status (errors report status 'unavailable')."""
        return await self._get_json(f"{self.service_endpoints['coordinator_api']}/health", "unavailable")

    async def submit_transaction(self, transaction_data: Dict[str, Any]) -> Dict[str, Any]:
        """Submit a transaction to the blockchain RPC service."""
        return await self._post_json(f"{self.service_endpoints['blockchain_rpc']}/rpc/submit", transaction_data)

    async def get_market_data(self, symbol: str = "AITBC/BTC") -> Dict[str, Any]:
        """Get market data from the exchange (errors report status 'failed')."""
        return await self._get_json(f"{self.service_endpoints['exchange_service']}/api/market/{symbol}", "failed")

    async def register_agent_with_coordinator(self, agent_data: Dict[str, Any]) -> Dict[str, Any]:
        """Register an agent via the agent-registry service."""
        return await self._post_json(f"{self.service_endpoints['agent_registry']}/api/agents/register", agent_data)
|
|
||||||
|
|
||||||
class AgentServiceBridge:
|
|
||||||
"""Bridge between agents and AITBC services"""
|
|
||||||
|
|
||||||
    def __init__(self):
        # Shared service-integration client; each bridge call opens and
        # closes its own HTTP session via ``async with self.integration``.
        self.integration = AITBCServiceIntegration()
        # agent_id -> {"config", "registration", "started_at"} for running agents
        self.active_agents = {}
|
|
||||||
|
|
||||||
async def start_agent(self, agent_id: str, agent_config: Dict[str, Any]) -> bool:
|
|
||||||
"""Start an agent with service integration"""
|
|
||||||
try:
|
|
||||||
# Register agent with coordinator
|
|
||||||
async with self.integration as integration:
|
|
||||||
registration_result = await integration.register_agent_with_coordinator({
|
|
||||||
"name": agent_id,
|
|
||||||
"type": agent_config.get("type", "generic"),
|
|
||||||
"capabilities": agent_config.get("capabilities", []),
|
|
||||||
"chain_id": agent_config.get("chain_id", "ait-mainnet"),
|
|
||||||
"endpoint": agent_config.get("endpoint", f"http://localhost:{8000 + len(self.active_agents) + 10}")
|
|
||||||
})
|
|
||||||
|
|
||||||
# The registry returns the created agent dict on success, not a {"status": "ok"} wrapper
|
|
||||||
if registration_result and "id" in registration_result:
|
|
||||||
self.active_agents[agent_id] = {
|
|
||||||
"config": agent_config,
|
|
||||||
"registration": registration_result,
|
|
||||||
"started_at": datetime.utcnow()
|
|
||||||
}
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
print(f"Registration failed: {registration_result}")
|
|
||||||
return False
|
|
||||||
except Exception as e:
|
|
||||||
print(f"Failed to start agent {agent_id}: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
async def stop_agent(self, agent_id: str) -> bool:
|
|
||||||
"""Stop an agent"""
|
|
||||||
if agent_id in self.active_agents:
|
|
||||||
del self.active_agents[agent_id]
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
async def get_agent_status(self, agent_id: str) -> Dict[str, Any]:
|
|
||||||
"""Get agent status with service integration"""
|
|
||||||
if agent_id not in self.active_agents:
|
|
||||||
return {"status": "not_found"}
|
|
||||||
|
|
||||||
agent_info = self.active_agents[agent_id]
|
|
||||||
|
|
||||||
async with self.integration as integration:
|
|
||||||
# Get service statuses
|
|
||||||
blockchain_status = await integration.get_blockchain_info()
|
|
||||||
exchange_status = await integration.get_exchange_status()
|
|
||||||
coordinator_status = await integration.get_coordinator_status()
|
|
||||||
|
|
||||||
return {
|
|
||||||
"agent_id": agent_id,
|
|
||||||
"status": "active",
|
|
||||||
"started_at": agent_info["started_at"].isoformat(),
|
|
||||||
"services": {
|
|
||||||
"blockchain": blockchain_status,
|
|
||||||
"exchange": exchange_status,
|
|
||||||
"coordinator": coordinator_status
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async def execute_agent_task(self, agent_id: str, task_data: Dict[str, Any]) -> Dict[str, Any]:
|
|
||||||
"""Execute agent task with service integration"""
|
|
||||||
if agent_id not in self.active_agents:
|
|
||||||
return {"status": "error", "message": "Agent not found"}
|
|
||||||
|
|
||||||
task_type = task_data.get("type")
|
|
||||||
|
|
||||||
if task_type == "market_analysis":
|
|
||||||
return await self._execute_market_analysis(task_data)
|
|
||||||
elif task_type == "trading":
|
|
||||||
return await self._execute_trading_task(task_data)
|
|
||||||
elif task_type == "compliance_check":
|
|
||||||
return await self._execute_compliance_check(task_data)
|
|
||||||
else:
|
|
||||||
return {"status": "error", "message": f"Unknown task type: {task_type}"}
|
|
||||||
|
|
||||||
async def _execute_market_analysis(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
    """Execute market analysis task.

    Fetches market data for the requested symbol and attaches a static
    baseline analysis; errors are reported in-band, never raised.
    """
    try:
        symbol = task_data.get("symbol", "AITBC/BTC")
        async with self.integration as integration:
            market_data = await integration.get_market_data(symbol)

            # Perform basic analysis
            analysis_result = {
                "symbol": symbol,
                "market_data": market_data,
                "analysis": {
                    "trend": "neutral",
                    "volatility": "medium",
                    "recommendation": "hold",
                },
                "timestamp": datetime.utcnow().isoformat(),
            }
            return {"status": "success", "result": analysis_result}
    except Exception as e:
        return {"status": "error", "message": str(e)}
|
|
||||||
|
|
||||||
async def _execute_trading_task(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
    """Execute trading task.

    Looks up current market data, builds a trade transaction (falling back
    to the quoted price when none is supplied) and submits it.
    """
    try:
        async with self.integration as integration:
            # Get market data first
            symbol = task_data.get("symbol", "AITBC/BTC")
            market_data = await integration.get_market_data(symbol)

            # Create transaction
            transaction = {
                "type": "trade",
                "symbol": symbol,
                "side": task_data.get("side", "buy"),
                "amount": task_data.get("amount", 0.1),
                "price": task_data.get("price", market_data.get("price", 0.001)),
            }

            # Submit transaction
            tx_result = await integration.submit_transaction(transaction)
            return {"status": "success", "transaction": tx_result}
    except Exception as e:
        return {"status": "error", "message": str(e)}
|
|
||||||
|
|
||||||
async def _execute_compliance_check(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
|
|
||||||
"""Execute compliance check task"""
|
|
||||||
try:
|
|
||||||
# Basic compliance check
|
|
||||||
compliance_result = {
|
|
||||||
"user_id": task_data.get("user_id"),
|
|
||||||
"check_type": task_data.get("check_type", "basic"),
|
|
||||||
"status": "passed",
|
|
||||||
"checks_performed": ["kyc", "aml", "sanctions"],
|
|
||||||
"timestamp": datetime.utcnow().isoformat()
|
|
||||||
}
|
|
||||||
|
|
||||||
return {"status": "success", "result": compliance_result}
|
|
||||||
except Exception as e:
|
|
||||||
return {"status": "error", "message": str(e)}
|
|
||||||
@@ -1,149 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Compliance Agent
|
|
||||||
Automated compliance and regulatory monitoring agent
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
from typing import Dict, Any, List
|
|
||||||
from datetime import datetime
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
# Add parent directory to path
|
|
||||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
|
||||||
|
|
||||||
from apps.agent_services.agent_bridge.src.integration_layer import AgentServiceBridge
|
|
||||||
|
|
||||||
class ComplianceAgent:
    """Automated compliance agent.

    Periodically runs compliance checks for a configured set of entities
    through the AgentServiceBridge and raises an alert (currently a
    console print) whenever a check fails.
    """

    def __init__(self, agent_id: str, config: Dict[str, Any]):
        self.agent_id = agent_id
        self.config = config
        # Bridge used for registration, task execution and status queries.
        self.bridge = AgentServiceBridge()
        self.is_running = False
        self.check_interval = config.get("check_interval", 300)  # 5 minutes
        # Entity ids swept on every monitoring pass.
        self.monitored_entities = config.get("monitored_entities", [])

    async def start(self) -> bool:
        """Start compliance agent.

        Registers with the service bridge and flips the running flag on
        success.  Registration failures and exceptions are reported via
        the return value, not raised.
        """
        try:
            success = await self.bridge.start_agent(self.agent_id, {
                "type": "compliance",
                "capabilities": ["kyc_check", "aml_screening", "regulatory_reporting"],
                "endpoint": f"http://localhost:8006"
            })

            if success:
                self.is_running = True
                print(f"Compliance agent {self.agent_id} started successfully")
                return True
            else:
                print(f"Failed to start compliance agent {self.agent_id}")
                return False
        except Exception as e:
            print(f"Error starting compliance agent: {e}")
            return False

    async def stop(self) -> bool:
        """Stop compliance agent.

        Clears the running flag first (so the monitoring loop exits at its
        next iteration), then deregisters from the bridge.
        """
        self.is_running = False
        success = await self.bridge.stop_agent(self.agent_id)
        if success:
            print(f"Compliance agent {self.agent_id} stopped successfully")
        return success

    async def run_compliance_loop(self):
        """Main compliance monitoring loop.

        Runs until ``stop()`` clears ``is_running``: sweeps every monitored
        entity, then sleeps for ``check_interval`` seconds.  Unexpected
        errors back off for 30 seconds before the next attempt.
        """
        while self.is_running:
            try:
                for entity in self.monitored_entities:
                    await self._perform_compliance_check(entity)

                await asyncio.sleep(self.check_interval)
            except Exception as e:
                print(f"Error in compliance loop: {e}")
                await asyncio.sleep(30)  # Wait before retrying

    async def _perform_compliance_check(self, entity_id: str) -> None:
        """Perform a full compliance check for one entity via the bridge."""
        try:
            compliance_task = {
                "type": "compliance_check",
                "user_id": entity_id,
                "check_type": "full",
                "monitored_activities": ["trading", "transfers", "wallet_creation"]
            }

            result = await self.bridge.execute_agent_task(self.agent_id, compliance_task)

            if result.get("status") == "success":
                compliance_result = result["result"]
                await self._handle_compliance_result(entity_id, compliance_result)
            else:
                print(f"Compliance check failed for {entity_id}: {result}")

        except Exception as e:
            print(f"Error performing compliance check for {entity_id}: {e}")

    async def _handle_compliance_result(self, entity_id: str, result: Dict[str, Any]) -> None:
        """Handle a compliance check result.

        Routes on the embedded ``status`` field: "passed" is logged,
        "failed" triggers an alert, anything else is reported as
        inconclusive.
        """
        status = result.get("status", "unknown")

        if status == "passed":
            print(f"✅ Compliance check passed for {entity_id}")
        elif status == "failed":
            print(f"❌ Compliance check failed for {entity_id}")
            # Trigger alert or further investigation
            await self._trigger_compliance_alert(entity_id, result)
        else:
            print(f"⚠️ Compliance check inconclusive for {entity_id}")

    async def _trigger_compliance_alert(self, entity_id: str, result: Dict[str, Any]) -> None:
        """Trigger compliance alert (console-only placeholder)."""
        alert_data = {
            "entity_id": entity_id,
            "alert_type": "compliance_failure",
            "severity": "high",
            "details": result,
            "timestamp": datetime.utcnow().isoformat()
        }

        # In a real implementation, this would send to alert system
        print(f"🚨 COMPLIANCE ALERT: {json.dumps(alert_data, indent=2)}")

    async def get_status(self) -> Dict[str, Any]:
        """Get agent status, augmented with monitoring configuration."""
        status = await self.bridge.get_agent_status(self.agent_id)
        status["monitored_entities"] = len(self.monitored_entities)
        status["check_interval"] = self.check_interval
        return status
|
|
||||||
|
|
||||||
# Main execution
|
|
||||||
async def main():
    """Main compliance agent execution"""
    agent = ComplianceAgent(
        "compliance-agent-001",
        {
            "check_interval": 60,  # 1 minute for testing
            "monitored_entities": ["user001", "user002", "user003"],
        },
    )

    # Guard clause: nothing to do unless registration succeeded.
    if not await agent.start():
        print("Failed to start compliance agent")
        return

    try:
        # Run compliance loop
        await agent.run_compliance_loop()
    except KeyboardInterrupt:
        print("Shutting down compliance agent...")
    finally:
        await agent.stop()
|
|
||||||
|
|
||||||
# Entry point: run the agent's async main loop when invoked as a script.
if __name__ == "__main__":
    asyncio.run(main())
|
|
||||||
@@ -1,132 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Coordinator Service
|
|
||||||
Agent task coordination and management
|
|
||||||
"""
|
|
||||||
|
|
||||||
from fastapi import FastAPI, HTTPException
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from typing import List, Optional, Dict, Any
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
import sqlite3
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from contextlib import asynccontextmanager
|
|
||||||
|
|
||||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: create the SQLite schema before serving."""
    # Startup
    init_db()
    yield
    # Shutdown (cleanup if needed)
    pass

app = FastAPI(title="AITBC Agent Coordinator API", version="1.0.0", lifespan=lifespan)
|
|
||||||
|
|
||||||
# Database setup
|
|
||||||
def get_db():
    """Open a connection to the coordinator database.

    Rows are returned as sqlite3.Row so they support name-based access.
    """
    connection = sqlite3.connect('agent_coordinator.db')
    connection.row_factory = sqlite3.Row
    return connection
|
|
||||||
|
|
||||||
@contextmanager
def get_db_connection():
    """Yield a database connection, guaranteeing it is closed afterwards."""
    connection = get_db()
    try:
        yield connection
    finally:
        connection.close()
|
|
||||||
|
|
||||||
# Initialize database
|
|
||||||
def init_db():
    """Create the ``tasks`` table on first run (idempotent)."""
    with get_db_connection() as conn:
        conn.execute('''
            CREATE TABLE IF NOT EXISTS tasks (
                id TEXT PRIMARY KEY,
                task_type TEXT NOT NULL,
                payload TEXT NOT NULL,
                required_capabilities TEXT NOT NULL,
                priority TEXT NOT NULL,
                status TEXT NOT NULL,
                assigned_agent_id TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                result TEXT
            )
        ''')
|
|
||||||
|
|
||||||
# Models
|
|
||||||
class Task(BaseModel):
    """API representation of a coordinator task."""

    id: str
    task_type: str
    payload: Dict[str, Any]            # arbitrary task input; JSON-encoded in the DB
    required_capabilities: List[str]   # capabilities an agent needs to run this task
    priority: str
    status: str
    assigned_agent_id: Optional[str] = None  # set once an agent is assigned
|
|
||||||
|
|
||||||
class TaskCreation(BaseModel):
    """Payload for creating a task (the server assigns id and status)."""

    task_type: str
    payload: Dict[str, Any]
    required_capabilities: List[str]
    priority: str = "normal"
|
|
||||||
|
|
||||||
# API Endpoints
|
|
||||||
|
|
||||||
@app.post("/api/tasks", response_model=Task)
|
|
||||||
async def create_task(task: TaskCreation):
|
|
||||||
"""Create a new task"""
|
|
||||||
task_id = str(uuid.uuid4())
|
|
||||||
|
|
||||||
with get_db_connection() as conn:
|
|
||||||
conn.execute('''
|
|
||||||
INSERT INTO tasks (id, task_type, payload, required_capabilities, priority, status)
|
|
||||||
VALUES (?, ?, ?, ?, ?, ?)
|
|
||||||
''', (
|
|
||||||
task_id, task.task_type, json.dumps(task.payload),
|
|
||||||
json.dumps(task.required_capabilities), task.priority, "pending"
|
|
||||||
))
|
|
||||||
|
|
||||||
return Task(
|
|
||||||
id=task_id,
|
|
||||||
task_type=task.task_type,
|
|
||||||
payload=task.payload,
|
|
||||||
required_capabilities=task.required_capabilities,
|
|
||||||
priority=task.priority,
|
|
||||||
status="pending"
|
|
||||||
)
|
|
||||||
|
|
||||||
@app.get("/api/tasks", response_model=List[Task])
|
|
||||||
async def list_tasks(status: Optional[str] = None):
|
|
||||||
"""List tasks with optional status filter"""
|
|
||||||
with get_db_connection() as conn:
|
|
||||||
query = "SELECT * FROM tasks"
|
|
||||||
params = []
|
|
||||||
|
|
||||||
if status:
|
|
||||||
query += " WHERE status = ?"
|
|
||||||
params.append(status)
|
|
||||||
|
|
||||||
tasks = conn.execute(query, params).fetchall()
|
|
||||||
|
|
||||||
return [
|
|
||||||
Task(
|
|
||||||
id=task["id"],
|
|
||||||
task_type=task["task_type"],
|
|
||||||
payload=json.loads(task["payload"]),
|
|
||||||
required_capabilities=json.loads(task["required_capabilities"]),
|
|
||||||
priority=task["priority"],
|
|
||||||
status=task["status"],
|
|
||||||
assigned_agent_id=task["assigned_agent_id"]
|
|
||||||
)
|
|
||||||
for task in tasks
|
|
||||||
]
|
|
||||||
|
|
||||||
@app.get("/api/health")
|
|
||||||
async def health_check():
|
|
||||||
"""Health check endpoint"""
|
|
||||||
return {"status": "ok", "timestamp": datetime.utcnow()}
|
|
||||||
|
|
||||||
# Run the coordinator as a standalone service on port 8012.
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8012)
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
# AITBC Agent Protocols Environment Configuration
|
|
||||||
# Copy this file to .env and update with your secure values
|
|
||||||
|
|
||||||
# Agent Protocol Encryption Key (generate a strong, unique key)
|
|
||||||
AITBC_AGENT_PROTOCOL_KEY=your-secure-encryption-key-here
|
|
||||||
|
|
||||||
# Agent Protocol Salt (generate a unique salt value)
|
|
||||||
AITBC_AGENT_PROTOCOL_SALT=your-unique-salt-value-here
|
|
||||||
|
|
||||||
# Agent Registry Configuration
|
|
||||||
AGENT_REGISTRY_HOST=0.0.0.0
|
|
||||||
AGENT_REGISTRY_PORT=8003
|
|
||||||
|
|
||||||
# Database Configuration
|
|
||||||
AGENT_REGISTRY_DB_PATH=agent_registry.db
|
|
||||||
|
|
||||||
# Security Settings
|
|
||||||
AGENT_PROTOCOL_TIMEOUT=300
|
|
||||||
AGENT_PROTOCOL_MAX_RETRIES=3
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
"""
|
|
||||||
Agent Protocols Package
|
|
||||||
"""
|
|
||||||
|
|
||||||
from .message_protocol import MessageProtocol, MessageTypes, AgentMessageClient
|
|
||||||
from .task_manager import TaskManager, TaskStatus, TaskPriority, Task
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"MessageProtocol",
|
|
||||||
"MessageTypes",
|
|
||||||
"AgentMessageClient",
|
|
||||||
"TaskManager",
|
|
||||||
"TaskStatus",
|
|
||||||
"TaskPriority",
|
|
||||||
"Task"
|
|
||||||
]
|
|
||||||
@@ -1,113 +0,0 @@
|
|||||||
"""
|
|
||||||
Message Protocol for AITBC Agents
|
|
||||||
Handles message creation, routing, and delivery between agents
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Dict, Any, Optional, List
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
class MessageTypes(Enum):
    """Message type enumeration.

    Discriminator stored in a message envelope's "message_type" field.
    """
    TASK_REQUEST = "task_request"
    TASK_RESPONSE = "task_response"
    HEARTBEAT = "heartbeat"
    STATUS_UPDATE = "status_update"
    ERROR = "error"
    DATA = "data"
|
|
||||||
|
|
||||||
class MessageProtocol:
    """Message protocol handler for agent communication.

    Keeps an in-memory log of every message created and tracks each
    message's delivery state ("pending" -> "sent" -> "received").
    """

    def __init__(self):
        # Chronological log of every envelope created through this protocol.
        self.messages = []
        # Reserved for per-type handler callbacks.
        self.message_handlers = {}

    def create_message(
        self,
        sender_id: str,
        receiver_id: str,
        message_type: MessageTypes,
        content: Dict[str, Any],
        message_id: Optional[str] = None
    ) -> Dict[str, Any]:
        """Create a new message envelope and append it to the log."""
        if message_id is None:
            message_id = str(uuid.uuid4())

        envelope = {
            "message_id": message_id,
            "sender_id": sender_id,
            "receiver_id": receiver_id,
            "message_type": message_type.value,
            "content": content,
            "timestamp": datetime.utcnow().isoformat(),
            "status": "pending",
        }
        self.messages.append(envelope)
        return envelope

    def send_message(self, message: Dict[str, Any]) -> bool:
        """Mark a message as sent; returns False if that fails."""
        try:
            message["status"] = "sent"
            message["sent_timestamp"] = datetime.utcnow().isoformat()
        except Exception:
            message["status"] = "failed"
            return False
        return True

    def receive_message(self, message_id: str) -> Optional[Dict[str, Any]]:
        """Mark the matching message as received and return it, else None."""
        match = next(
            (m for m in self.messages if m["message_id"] == message_id),
            None,
        )
        if match is not None:
            match["status"] = "received"
            match["received_timestamp"] = datetime.utcnow().isoformat()
        return match

    def get_messages_by_agent(self, agent_id: str) -> List[Dict[str, Any]]:
        """Get every message this agent either sent or received."""
        return [
            m for m in self.messages
            if agent_id in (m["sender_id"], m["receiver_id"])
        ]
|
|
||||||
|
|
||||||
class AgentMessageClient:
    """Client for agent message communication.

    Thin wrapper binding one agent id to a shared MessageProtocol.
    """

    def __init__(self, agent_id: str, protocol: MessageProtocol):
        self.agent_id = agent_id
        self.protocol = protocol
        # Messages already consumed by receive_messages(); used for dedup.
        self.received_messages = []

    def send_message(
        self,
        receiver_id: str,
        message_type: MessageTypes,
        content: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Send a message to another agent and return its envelope."""
        outgoing = self.protocol.create_message(
            sender_id=self.agent_id,
            receiver_id=receiver_id,
            message_type=message_type,
            content=content
        )
        self.protocol.send_message(outgoing)
        return outgoing

    def receive_messages(self) -> List[Dict[str, Any]]:
        """Receive all pending messages for this agent.

        A message qualifies when it is addressed to us, currently in the
        "sent" state, and was not consumed by an earlier call.
        """
        fresh = []
        for candidate in self.protocol.messages:
            if candidate["receiver_id"] != self.agent_id:
                continue
            if candidate["status"] != "sent":
                continue
            if candidate in self.received_messages:
                continue
            self.protocol.receive_message(candidate["message_id"])
            self.received_messages.append(candidate)
            fresh.append(candidate)
        return fresh
|
|
||||||
@@ -1,128 +0,0 @@
|
|||||||
"""
|
|
||||||
Task Manager for AITBC Agents
|
|
||||||
Handles task creation, assignment, and tracking
|
|
||||||
"""
|
|
||||||
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
from typing import Dict, Any, Optional, List
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
class TaskStatus(Enum):
    """Task status enumeration (states used by TaskManager)."""
    PENDING = "pending"
    IN_PROGRESS = "in_progress"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"
|
|
||||||
|
|
||||||
class TaskPriority(Enum):
    """Task priority enumeration, lowest to highest."""
    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    URGENT = "urgent"
|
|
||||||
|
|
||||||
class Task:
    """Task representation"""

    def __init__(
        self,
        task_id: str,
        title: str,
        description: str,
        assigned_to: str,
        priority: TaskPriority = TaskPriority.MEDIUM,
        created_by: Optional[str] = None
    ):
        """Initialise a task in the PENDING state.

        Args:
            task_id: Unique identifier for the task.
            title: Short human-readable title.
            description: Longer free-form description.
            assigned_to: Agent id the task is assigned to.
            priority: Scheduling priority (defaults to MEDIUM).
            created_by: Creator's agent id; defaults to the assignee.
        """
        self.task_id = task_id
        self.title = title
        self.description = description
        self.assigned_to = assigned_to
        self.priority = priority
        # Fall back to the assignee when no explicit creator is given.
        self.created_by = created_by or assigned_to
        self.status = TaskStatus.PENDING
        self.created_at = datetime.utcnow()
        self.updated_at = datetime.utcnow()
        self.completed_at = None  # set when the task completes
        self.result = None        # payload recorded on completion
        self.error = None         # message recorded on failure
|
|
||||||
|
|
||||||
class TaskManager:
    """Task manager for agent coordination.

    In-memory registry of Task objects keyed by id, with query helpers
    for agents, statuses and overdue work.
    """

    def __init__(self):
        self.tasks = {}         # task_id -> Task
        self.task_history = []  # reserved for archived tasks

    def create_task(
        self,
        title: str,
        description: str,
        assigned_to: str,
        priority: TaskPriority = TaskPriority.MEDIUM,
        created_by: Optional[str] = None
    ) -> Task:
        """Create, register and return a new task."""
        new_id = str(uuid.uuid4())
        new_task = Task(
            task_id=new_id,
            title=title,
            description=description,
            assigned_to=assigned_to,
            priority=priority,
            created_by=created_by
        )
        self.tasks[new_id] = new_task
        return new_task

    def get_task(self, task_id: str) -> Optional[Task]:
        """Get a task by ID (None when unknown)."""
        return self.tasks.get(task_id)

    def update_task_status(
        self,
        task_id: str,
        status: TaskStatus,
        result: Optional[Dict[str, Any]] = None,
        error: Optional[str] = None
    ) -> bool:
        """Update a task's status.

        Records the result on completion and the error on failure.
        Returns False when the task id is unknown.
        """
        target = self.get_task(task_id)
        if target is None:
            return False

        target.status = status
        target.updated_at = datetime.utcnow()

        if status == TaskStatus.COMPLETED:
            target.completed_at = datetime.utcnow()
            target.result = result
        elif status == TaskStatus.FAILED:
            target.error = error

        return True

    def get_tasks_by_agent(self, agent_id: str) -> List[Task]:
        """Get all tasks assigned to an agent"""
        return [t for t in self.tasks.values() if t.assigned_to == agent_id]

    def get_tasks_by_status(self, status: TaskStatus) -> List[Task]:
        """Get all tasks with a specific status"""
        return [t for t in self.tasks.values() if t.status == status]

    def get_overdue_tasks(self, hours: int = 24) -> List[Task]:
        """Get unfinished tasks created more than *hours* ago."""
        cutoff_time = datetime.utcnow() - timedelta(hours=hours)
        open_states = (TaskStatus.PENDING, TaskStatus.IN_PROGRESS)
        return [
            t for t in self.tasks.values()
            if t.status in open_states and t.created_at < cutoff_time
        ]
|
|
||||||
@@ -1,151 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Agent Registry Service
|
|
||||||
Central agent discovery and registration system
|
|
||||||
"""
|
|
||||||
|
|
||||||
from fastapi import FastAPI, HTTPException, Depends
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from typing import List, Optional, Dict, Any
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
import sqlite3
|
|
||||||
from contextlib import contextmanager
|
|
||||||
from contextlib import asynccontextmanager
|
|
||||||
|
|
||||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: create the SQLite schema before serving."""
    # Startup
    init_db()
    yield
    # Shutdown (cleanup if needed)
    pass

app = FastAPI(title="AITBC Agent Registry API", version="1.0.0", lifespan=lifespan)
|
|
||||||
|
|
||||||
# Database setup
|
|
||||||
def get_db():
    """Open a connection to the registry database.

    Rows are returned as sqlite3.Row so they support name-based access.
    """
    connection = sqlite3.connect('agent_registry.db')
    connection.row_factory = sqlite3.Row
    return connection
|
|
||||||
|
|
||||||
@contextmanager
def get_db_connection():
    """Yield a database connection, guaranteeing it is closed afterwards."""
    connection = get_db()
    try:
        yield connection
    finally:
        connection.close()
|
|
||||||
|
|
||||||
# Initialize database
|
|
||||||
def init_db():
    """Create the ``agents`` table on first run (idempotent)."""
    with get_db_connection() as conn:
        conn.execute('''
            CREATE TABLE IF NOT EXISTS agents (
                id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                type TEXT NOT NULL,
                capabilities TEXT NOT NULL,
                chain_id TEXT NOT NULL,
                endpoint TEXT NOT NULL,
                status TEXT DEFAULT 'active',
                last_heartbeat TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                metadata TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''')
|
|
||||||
|
|
||||||
# Models
|
|
||||||
class Agent(BaseModel):
    """API representation of a registered agent."""

    id: str
    name: str
    type: str
    capabilities: List[str]  # stored JSON-encoded in the DB
    chain_id: str
    endpoint: str
    # NOTE(review): the mutable {} default is safe only because pydantic
    # copies field defaults per instance — do not mirror this pattern in
    # plain Python classes.
    metadata: Optional[Dict[str, Any]] = {}
|
|
||||||
|
|
||||||
class AgentRegistration(BaseModel):
    """Payload for registering an agent (the server assigns the id)."""

    name: str
    type: str
    capabilities: List[str]
    chain_id: str
    endpoint: str
    metadata: Optional[Dict[str, Any]] = {}
|
|
||||||
|
|
||||||
# API Endpoints
|
|
||||||
|
|
||||||
@app.post("/api/agents/register", response_model=Agent)
|
|
||||||
async def register_agent(agent: AgentRegistration):
|
|
||||||
"""Register a new agent"""
|
|
||||||
agent_id = str(uuid.uuid4())
|
|
||||||
|
|
||||||
with get_db_connection() as conn:
|
|
||||||
conn.execute('''
|
|
||||||
INSERT INTO agents (id, name, type, capabilities, chain_id, endpoint, metadata)
|
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?)
|
|
||||||
''', (
|
|
||||||
agent_id, agent.name, agent.type,
|
|
||||||
json.dumps(agent.capabilities), agent.chain_id,
|
|
||||||
agent.endpoint, json.dumps(agent.metadata)
|
|
||||||
))
|
|
||||||
conn.commit()
|
|
||||||
|
|
||||||
return Agent(
|
|
||||||
id=agent_id,
|
|
||||||
name=agent.name,
|
|
||||||
type=agent.type,
|
|
||||||
capabilities=agent.capabilities,
|
|
||||||
chain_id=agent.chain_id,
|
|
||||||
endpoint=agent.endpoint,
|
|
||||||
metadata=agent.metadata
|
|
||||||
)
|
|
||||||
|
|
||||||
@app.get("/api/agents", response_model=List[Agent])
|
|
||||||
async def list_agents(
|
|
||||||
agent_type: Optional[str] = None,
|
|
||||||
chain_id: Optional[str] = None,
|
|
||||||
capability: Optional[str] = None
|
|
||||||
):
|
|
||||||
"""List registered agents with optional filters"""
|
|
||||||
with get_db_connection() as conn:
|
|
||||||
query = "SELECT * FROM agents WHERE status = 'active'"
|
|
||||||
params = []
|
|
||||||
|
|
||||||
if agent_type:
|
|
||||||
query += " AND type = ?"
|
|
||||||
params.append(agent_type)
|
|
||||||
|
|
||||||
if chain_id:
|
|
||||||
query += " AND chain_id = ?"
|
|
||||||
params.append(chain_id)
|
|
||||||
|
|
||||||
if capability:
|
|
||||||
query += " AND capabilities LIKE ?"
|
|
||||||
params.append(f'%{capability}%')
|
|
||||||
|
|
||||||
agents = conn.execute(query, params).fetchall()
|
|
||||||
|
|
||||||
return [
|
|
||||||
Agent(
|
|
||||||
id=agent["id"],
|
|
||||||
name=agent["name"],
|
|
||||||
type=agent["type"],
|
|
||||||
capabilities=json.loads(agent["capabilities"]),
|
|
||||||
chain_id=agent["chain_id"],
|
|
||||||
endpoint=agent["endpoint"],
|
|
||||||
metadata=json.loads(agent["metadata"] or "{}")
|
|
||||||
)
|
|
||||||
for agent in agents
|
|
||||||
]
|
|
||||||
|
|
||||||
@app.get("/api/health")
|
|
||||||
async def health_check():
|
|
||||||
"""Health check endpoint"""
|
|
||||||
return {"status": "ok", "timestamp": datetime.utcnow()}
|
|
||||||
|
|
||||||
# Run the registry as a standalone service on port 8013.
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8013)
|
|
||||||
@@ -1,431 +0,0 @@
|
|||||||
"""
|
|
||||||
Agent Registration System
|
|
||||||
Handles AI agent registration, capability management, and discovery
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import time
|
|
||||||
import json
|
|
||||||
import hashlib
|
|
||||||
from typing import Dict, List, Optional, Set, Tuple
|
|
||||||
from dataclasses import dataclass, asdict
|
|
||||||
from enum import Enum
|
|
||||||
from decimal import Decimal
|
|
||||||
|
|
||||||
class AgentType(Enum):
    """Closed set of roles an agent can register as."""
    AI_MODEL = "ai_model"
    DATA_PROVIDER = "data_provider"
    VALIDATOR = "validator"
    MARKET_MAKER = "market_maker"
    BROKER = "broker"
    ORACLE = "oracle"
|
|
||||||
|
|
||||||
class AgentStatus(Enum):
    """Lifecycle states of a registered agent."""
    REGISTERED = "registered"  # initial state after register_agent
    ACTIVE = "active"
    INACTIVE = "inactive"
    SUSPENDED = "suspended"
    BANNED = "banned"
|
|
||||||
|
|
||||||
class CapabilityType(Enum):
    """Closed set of capability categories an agent may advertise."""
    TEXT_GENERATION = "text_generation"
    IMAGE_GENERATION = "image_generation"
    DATA_ANALYSIS = "data_analysis"
    PREDICTION = "prediction"
    VALIDATION = "validation"
    COMPUTATION = "computation"
|
|
||||||
|
|
||||||
@dataclass
class AgentCapability:
    """A single capability advertised by an agent."""
    capability_type: CapabilityType
    name: str
    version: str
    parameters: Dict           # capability-specific configuration
    performance_metrics: Dict
    cost_per_use: Decimal      # price charged per invocation
    availability: float        # presumably a 0.0-1.0 uptime fraction — TODO confirm
    max_concurrent_jobs: int
|
|
||||||
|
|
||||||
@dataclass
class AgentInfo:
    """Registry record describing one registered agent."""
    agent_id: str
    agent_type: AgentType
    name: str
    owner_address: str        # hex address; validated as 0x-prefixed, 42 chars
    public_key: str
    endpoint_url: str         # http(s) URL the agent serves requests on
    capabilities: List[AgentCapability]
    reputation_score: float   # starts at 1.0 ("neutral") on registration
    total_jobs_completed: int
    total_earnings: Decimal
    registration_time: float  # Unix timestamp (time.time())
    last_active: float        # Unix timestamp of last observed activity
    status: AgentStatus
    metadata: Dict
|
|
||||||
class AgentRegistry:
|
|
||||||
"""Manages AI agent registration and discovery"""
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.agents: Dict[str, AgentInfo] = {}
|
|
||||||
self.capability_index: Dict[CapabilityType, Set[str]] = {} # capability -> agent_ids
|
|
||||||
self.type_index: Dict[AgentType, Set[str]] = {} # agent_type -> agent_ids
|
|
||||||
self.reputation_scores: Dict[str, float] = {}
|
|
||||||
self.registration_queue: List[Dict] = []
|
|
||||||
|
|
||||||
# Registry parameters
|
|
||||||
self.min_reputation_threshold = 0.5
|
|
||||||
self.max_agents_per_type = 1000
|
|
||||||
self.registration_fee = Decimal('100.0')
|
|
||||||
self.inactivity_threshold = 86400 * 7 # 7 days
|
|
||||||
|
|
||||||
# Initialize capability index
|
|
||||||
for capability_type in CapabilityType:
|
|
||||||
self.capability_index[capability_type] = set()
|
|
||||||
|
|
||||||
# Initialize type index
|
|
||||||
for agent_type in AgentType:
|
|
||||||
self.type_index[agent_type] = set()
|
|
||||||
|
|
||||||
async def register_agent(self, agent_type: AgentType, name: str, owner_address: str,
|
|
||||||
public_key: str, endpoint_url: str, capabilities: List[Dict],
|
|
||||||
metadata: Dict = None) -> Tuple[bool, str, Optional[str]]:
|
|
||||||
"""Register a new AI agent"""
|
|
||||||
try:
|
|
||||||
# Validate inputs
|
|
||||||
if not self._validate_registration_inputs(agent_type, name, owner_address, public_key, endpoint_url):
|
|
||||||
return False, "Invalid registration inputs", None
|
|
||||||
|
|
||||||
# Check if agent already exists
|
|
||||||
agent_id = self._generate_agent_id(owner_address, name)
|
|
||||||
if agent_id in self.agents:
|
|
||||||
return False, "Agent already registered", None
|
|
||||||
|
|
||||||
# Check type limits
|
|
||||||
if len(self.type_index[agent_type]) >= self.max_agents_per_type:
|
|
||||||
return False, f"Maximum agents of type {agent_type.value} reached", None
|
|
||||||
|
|
||||||
# Convert capabilities
|
|
||||||
agent_capabilities = []
|
|
||||||
for cap_data in capabilities:
|
|
||||||
capability = self._create_capability_from_data(cap_data)
|
|
||||||
if capability:
|
|
||||||
agent_capabilities.append(capability)
|
|
||||||
|
|
||||||
if not agent_capabilities:
|
|
||||||
return False, "Agent must have at least one valid capability", None
|
|
||||||
|
|
||||||
# Create agent info
|
|
||||||
agent_info = AgentInfo(
|
|
||||||
agent_id=agent_id,
|
|
||||||
agent_type=agent_type,
|
|
||||||
name=name,
|
|
||||||
owner_address=owner_address,
|
|
||||||
public_key=public_key,
|
|
||||||
endpoint_url=endpoint_url,
|
|
||||||
capabilities=agent_capabilities,
|
|
||||||
reputation_score=1.0, # Start with neutral reputation
|
|
||||||
total_jobs_completed=0,
|
|
||||||
total_earnings=Decimal('0'),
|
|
||||||
registration_time=time.time(),
|
|
||||||
last_active=time.time(),
|
|
||||||
status=AgentStatus.REGISTERED,
|
|
||||||
metadata=metadata or {}
|
|
||||||
)
|
|
||||||
|
|
||||||
# Add to registry
|
|
||||||
self.agents[agent_id] = agent_info
|
|
||||||
|
|
||||||
# Update indexes
|
|
||||||
self.type_index[agent_type].add(agent_id)
|
|
||||||
for capability in agent_capabilities:
|
|
||||||
self.capability_index[capability.capability_type].add(agent_id)
|
|
||||||
|
|
||||||
log_info(f"Agent registered: {agent_id} ({name})")
|
|
||||||
return True, "Registration successful", agent_id
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
return False, f"Registration failed: {str(e)}", None
|
|
||||||
|
|
||||||
def _validate_registration_inputs(self, agent_type: AgentType, name: str,
                                  owner_address: str, public_key: str, endpoint_url: str) -> bool:
    """Return True when every registration field passes basic format checks.

    Checks performed: all fields present and non-empty, owner address is a
    0x-prefixed 42-character value, endpoint is an HTTP(S) URL, and the
    display name length is within [3, 100].
    """
    fields = (agent_type, name, owner_address, public_key, endpoint_url)
    if any(not field for field in fields):
        return False

    # Address format (simplified): 0x prefix plus 40 further characters.
    if not (owner_address.startswith('0x') and len(owner_address) == 42):
        return False

    # Endpoint must be a web URL (simplified check).
    if not endpoint_url.startswith(('http://', 'https://')):
        return False

    # Display-name length bounds.
    return 3 <= len(name) <= 100
|
|
||||||
|
|
||||||
def _generate_agent_id(self, owner_address: str, name: str) -> str:
|
|
||||||
"""Generate unique agent ID"""
|
|
||||||
content = f"{owner_address}:{name}:{time.time()}"
|
|
||||||
return hashlib.sha256(content.encode()).hexdigest()[:16]
|
|
||||||
|
|
||||||
def _create_capability_from_data(self, cap_data: Dict) -> Optional[AgentCapability]:
    """Build an AgentCapability from a raw dict, or None when it is invalid.

    A valid dict carries at least 'type', 'name', 'version' and
    'cost_per_use'; 'type' must map onto a CapabilityType member. Any other
    construction error is logged and reported as None.
    """
    try:
        # Reject payloads missing any mandatory key.
        for field in ('type', 'name', 'version', 'cost_per_use'):
            if field not in cap_data:
                return None

        try:
            capability_type = CapabilityType(cap_data['type'])
        except ValueError:
            # Unknown capability type string.
            return None

        return AgentCapability(
            capability_type=capability_type,
            name=cap_data['name'],
            version=cap_data['version'],
            parameters=cap_data.get('parameters', {}),
            performance_metrics=cap_data.get('performance_metrics', {}),
            cost_per_use=Decimal(str(cap_data['cost_per_use'])),
            availability=cap_data.get('availability', 1.0),
            max_concurrent_jobs=cap_data.get('max_concurrent_jobs', 1),
        )
    except Exception as e:
        log_error(f"Error creating capability: {e}")
        return None
|
|
||||||
|
|
||||||
async def update_agent_status(self, agent_id: str, status: AgentStatus) -> Tuple[bool, str]:
    """Set a registered agent's status and refresh its last-active timestamp.

    Returns:
        (success, message); fails only when the agent ID is unknown.
    """
    if agent_id not in self.agents:
        return False, "Agent not found"

    agent = self.agents[agent_id]
    previous = agent.status
    agent.status = status
    agent.last_active = time.time()

    log_info(f"Agent {agent_id} status changed: {previous.value} -> {status.value}")
    return True, "Status updated successfully"
|
|
||||||
|
|
||||||
async def update_agent_capabilities(self, agent_id: str, capabilities: List[Dict]) -> Tuple[bool, str]:
    """Replace an agent's capability list and keep the capability index in sync.

    Args:
        agent_id: Registry identifier of the agent to update.
        capabilities: Capability description dicts (same schema as
            registration); invalid entries are silently dropped.

    Returns:
        (success, message). On failure the agent and the capability index
        are left exactly as they were.
    """
    if agent_id not in self.agents:
        return False, "Agent not found"

    agent = self.agents[agent_id]

    # Build and validate the new capability list BEFORE touching the index.
    # The previous implementation removed the old index entries first, so a
    # fully-invalid payload returned failure but left the index out of sync
    # with agent.capabilities.
    new_capabilities = []
    for cap_data in capabilities:
        capability = self._create_capability_from_data(cap_data)
        if capability:
            new_capabilities.append(capability)

    if not new_capabilities:
        return False, "No valid capabilities provided"

    # Swap index entries: drop the old capability types, add the new ones.
    for old_capability in agent.capabilities:
        self.capability_index[old_capability.capability_type].discard(agent_id)
    for capability in new_capabilities:
        self.capability_index[capability.capability_type].add(agent_id)

    agent.capabilities = new_capabilities
    agent.last_active = time.time()

    return True, "Capabilities updated successfully"
|
|
||||||
|
|
||||||
async def find_agents_by_capability(self, capability_type: CapabilityType,
                                    filters: Dict = None) -> List[AgentInfo]:
    """Return ACTIVE agents offering a capability, best reputation first.

    Args:
        capability_type: Capability to look up in the index.
        filters: Optional filter dict understood by ``_matches_filters``.
    """
    candidate_ids = self.capability_index.get(capability_type, set())
    matched = [
        agent
        for agent in map(self.agents.get, candidate_ids)
        if agent is not None
        and agent.status == AgentStatus.ACTIVE
        and self._matches_filters(agent, filters)
    ]
    return sorted(matched, key=lambda a: a.reputation_score, reverse=True)
|
|
||||||
|
|
||||||
async def find_agents_by_type(self, agent_type: AgentType, filters: Dict = None) -> List[AgentInfo]:
    """Return ACTIVE agents of the given type, best reputation first.

    Args:
        agent_type: Agent type to look up in the type index.
        filters: Optional filter dict understood by ``_matches_filters``.
    """
    matched = []
    for candidate_id in self.type_index.get(agent_type, set()):
        candidate = self.agents.get(candidate_id)
        if candidate is None or candidate.status != AgentStatus.ACTIVE:
            continue
        if self._matches_filters(candidate, filters):
            matched.append(candidate)

    matched.sort(key=lambda a: a.reputation_score, reverse=True)
    return matched
|
|
||||||
|
|
||||||
def _matches_filters(self, agent: AgentInfo, filters: Dict) -> bool:
    """Return True when the agent satisfies every provided filter.

    Supported keys: 'min_reputation', 'max_cost_per_use',
    'min_availability' and 'location'. An empty or None filter dict
    matches everything.
    """
    if not filters:
        return True

    # Reputation floor.
    if 'min_reputation' in filters and agent.reputation_score < filters['min_reputation']:
        return False

    # Every capability must cost at most the requested ceiling.
    if 'max_cost_per_use' in filters:
        ceiling = Decimal(str(filters['max_cost_per_use']))
        for cap in agent.capabilities:
            if cap.cost_per_use > ceiling:
                return False

    # Every capability must meet the availability floor.
    if 'min_availability' in filters:
        floor = filters['min_availability']
        for cap in agent.capabilities:
            if cap.availability < floor:
                return False

    # Exact-match location from agent metadata (if the filter is present).
    if 'location' in filters and agent.metadata.get('location') != filters['location']:
        return False

    return True
|
|
||||||
|
|
||||||
async def get_agent_info(self, agent_id: str) -> Optional[AgentInfo]:
    """Look up a registered agent by ID; returns None when unknown."""
    try:
        return self.agents[agent_id]
    except KeyError:
        return None
|
|
||||||
|
|
||||||
async def search_agents(self, query: str, limit: int = 50) -> List[AgentInfo]:
    """Case-insensitively match ACTIVE agents by name or capability.

    Args:
        query: Substring to look for in agent names, capability names and
            capability type values.
        limit: Maximum number of agents to return.

    Returns:
        Matching agents sorted by reputation, highest first, capped at
        ``limit`` entries.
    """
    needle = query.lower()
    hits = []

    for agent in self.agents.values():
        if agent.status != AgentStatus.ACTIVE:
            continue

        if needle in agent.name.lower():
            hits.append(agent)
        # Fall back to capability name / type matching (at most one hit per agent).
        elif any(needle in cap.name.lower() or needle in cap.capability_type.value
                 for cap in agent.capabilities):
            hits.append(agent)

    hits.sort(key=lambda a: a.reputation_score, reverse=True)
    return hits[:limit]
|
|
||||||
|
|
||||||
async def get_agent_statistics(self, agent_id: str) -> Optional[Dict]:
    """Compute a statistics snapshot for one agent, or None if unknown.

    Derived values: average earnings per completed job, whole days since
    registration, and completed jobs per day.
    """
    agent = self.agents.get(agent_id)
    if agent is None:
        return None

    completed = agent.total_jobs_completed
    avg_job_earnings = agent.total_earnings / completed if completed > 0 else Decimal('0')
    days_active = (time.time() - agent.registration_time) / 86400
    jobs_per_day = completed / days_active if days_active > 0 else 0

    return {
        'agent_id': agent_id,
        'name': agent.name,
        'type': agent.agent_type.value,
        'status': agent.status.value,
        'reputation_score': agent.reputation_score,
        'total_jobs_completed': completed,
        'total_earnings': float(agent.total_earnings),
        'avg_job_earnings': float(avg_job_earnings),
        'jobs_per_day': jobs_per_day,
        'days_active': int(days_active),
        'capabilities_count': len(agent.capabilities),
        'last_active': agent.last_active,
        'registration_time': agent.registration_time,
    }
|
|
||||||
|
|
||||||
async def get_registry_statistics(self) -> Dict:
    """Aggregate registry-wide counts, reputation and earnings figures."""
    total_agents = len(self.agents)
    active_agents = sum(1 for a in self.agents.values() if a.status == AgentStatus.ACTIVE)

    # Per-type and per-capability populations, driven by the indexes.
    type_counts = {t.value: len(self.type_index[t]) for t in AgentType}
    capability_counts = {c.value: len(self.capability_index[c]) for c in CapabilityType}

    # Mean reputation across ALL agents (active and inactive alike).
    reputations = [a.reputation_score for a in self.agents.values()]
    avg_reputation = sum(reputations) / len(reputations) if reputations else 0

    total_earnings = sum(a.total_earnings for a in self.agents.values())

    return {
        'total_agents': total_agents,
        'active_agents': active_agents,
        'inactive_agents': total_agents - active_agents,
        'agent_types': type_counts,
        'capabilities': capability_counts,
        'average_reputation': avg_reputation,
        'total_earnings': float(total_earnings),
        'registration_fee': float(self.registration_fee),
    }
|
|
||||||
|
|
||||||
async def cleanup_inactive_agents(self) -> Tuple[int, str]:
    """Evict agents that have been INACTIVE beyond the inactivity threshold.

    Removes matching agents from the registry and from both the type and
    capability indexes.

    Returns:
        (count, message) describing how many agents were removed.
    """
    now = time.time()

    # Snapshot the expired agents first so removal never mutates a live iterator.
    expired = [
        (agent_id, agent)
        for agent_id, agent in self.agents.items()
        if agent.status == AgentStatus.INACTIVE
        and now - agent.last_active > self.inactivity_threshold
    ]

    for agent_id, agent in expired:
        del self.agents[agent_id]
        self.type_index[agent.agent_type].discard(agent_id)
        for capability in agent.capabilities:
            self.capability_index[capability.capability_type].discard(agent_id)

    if expired:
        log_info(f"Cleaned up {len(expired)} inactive agents")

    return len(expired), f"Cleaned up {len(expired)} inactive agents"
|
|
||||||
|
|
||||||
# Global agent registry
# Process-wide singleton; None until create_agent_registry() is called.
agent_registry: Optional[AgentRegistry] = None


def get_agent_registry() -> Optional[AgentRegistry]:
    """Return the global AgentRegistry, or None if it has not been created yet."""
    return agent_registry


def create_agent_registry() -> AgentRegistry:
    """Create a fresh AgentRegistry, install it as the global instance and return it.

    Note: calling this again replaces any existing registry (state is lost).
    """
    global agent_registry
    agent_registry = AgentRegistry()
    return agent_registry
|
|
||||||
@@ -1,166 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
AITBC Trading Agent
|
|
||||||
Automated trading agent for AITBC marketplace
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
from typing import Dict, Any, List
|
|
||||||
from datetime import datetime
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
# Add parent directory to path
|
|
||||||
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
|
|
||||||
|
|
||||||
from apps.agent_services.agent_bridge.src.integration_layer import AgentServiceBridge
|
|
||||||
|
|
||||||
class TradingAgent:
    """Automated trading agent.

    Periodically analyses the configured symbols via the AgentServiceBridge
    and submits buy/sell tasks when the analysis recommends them.
    """

    def __init__(self, agent_id: str, config: Dict[str, Any]):
        # Identifier used for all bridge calls.
        self.agent_id = agent_id
        self.config = config
        self.bridge = AgentServiceBridge()
        # Set by start()/stop(); the trading loop runs while this is True.
        self.is_running = False
        self.trading_strategy = config.get("strategy", "basic")
        # Symbols to analyse each cycle.
        self.symbols = config.get("symbols", ["AITBC/BTC"])
        self.trade_interval = config.get("trade_interval", 60)  # seconds

    async def start(self) -> bool:
        """Register this agent with the service bridge; True on success."""
        try:
            # Register with service bridge
            success = await self.bridge.start_agent(self.agent_id, {
                "type": "trading",
                "capabilities": ["market_analysis", "trading", "risk_management"],
                "endpoint": f"http://localhost:8005"
            })

            if success:
                self.is_running = True
                print(f"Trading agent {self.agent_id} started successfully")
                return True
            else:
                print(f"Failed to start trading agent {self.agent_id}")
                return False
        except Exception as e:
            # Bridge errors are reported but never propagated to the caller.
            print(f"Error starting trading agent: {e}")
            return False

    async def stop(self) -> bool:
        """Stop the trading loop and deregister from the bridge."""
        # Flip the flag first so run_trading_loop() exits its next iteration.
        self.is_running = False
        success = await self.bridge.stop_agent(self.agent_id)
        if success:
            print(f"Trading agent {self.agent_id} stopped successfully")
        return success

    async def run_trading_loop(self):
        """Main trading loop: analyse every symbol, then sleep for trade_interval."""
        while self.is_running:
            try:
                for symbol in self.symbols:
                    await self._analyze_and_trade(symbol)

                await asyncio.sleep(self.trade_interval)
            except Exception as e:
                # Keep the loop alive on errors; back off briefly before retrying.
                print(f"Error in trading loop: {e}")
                await asyncio.sleep(10)  # Wait before retrying

    async def _analyze_and_trade(self, symbol: str) -> None:
        """Run a market-analysis task for *symbol* and trade on its recommendation."""
        try:
            # Perform market analysis
            analysis_task = {
                "type": "market_analysis",
                "symbol": symbol,
                "strategy": self.trading_strategy
            }

            analysis_result = await self.bridge.execute_agent_task(self.agent_id, analysis_task)

            if analysis_result.get("status") == "success":
                # NOTE(review): assumes the bridge result carries
                # result["analysis"] with a "recommendation" key — confirm
                # against the bridge's task schema.
                analysis = analysis_result["result"]["analysis"]

                # Make trading decision
                if self._should_trade(analysis):
                    await self._execute_trade(symbol, analysis)
            else:
                print(f"Market analysis failed for {symbol}: {analysis_result}")

        except Exception as e:
            print(f"Error in analyze_and_trade for {symbol}: {e}")

    def _should_trade(self, analysis: Dict[str, Any]) -> bool:
        """Return True when the analysis recommends an actionable side (buy/sell)."""
        recommendation = analysis.get("recommendation", "hold")
        return recommendation in ["buy", "sell"]

    async def _execute_trade(self, symbol: str, analysis: Dict[str, Any]) -> None:
        """Submit a buy or sell trading task matching the analysis recommendation."""
        try:
            recommendation = analysis.get("recommendation", "hold")

            if recommendation == "buy":
                trade_task = {
                    "type": "trading",
                    "symbol": symbol,
                    "side": "buy",
                    "amount": self.config.get("trade_amount", 0.1),
                    "strategy": self.trading_strategy
                }
            elif recommendation == "sell":
                trade_task = {
                    "type": "trading",
                    "symbol": symbol,
                    "side": "sell",
                    "amount": self.config.get("trade_amount", 0.1),
                    "strategy": self.trading_strategy
                }
            else:
                # "hold" (or anything unrecognised): nothing to do.
                return

            trade_result = await self.bridge.execute_agent_task(self.agent_id, trade_task)

            if trade_result.get("status") == "success":
                print(f"Trade executed successfully: {trade_result}")
            else:
                print(f"Trade execution failed: {trade_result}")

        except Exception as e:
            print(f"Error executing trade: {e}")

    async def get_status(self) -> Dict[str, Any]:
        """Fetch this agent's status from the service bridge."""
        return await self.bridge.get_agent_status(self.agent_id)
|
|
||||||
|
|
||||||
# Main execution
|
|
||||||
async def main():
    """Construct a demo TradingAgent, start it, and run its loop until interrupted."""
    config = {
        "strategy": "basic",
        "symbols": ["AITBC/BTC"],
        "trade_interval": 30,
        "trade_amount": 0.1,
    }
    agent = TradingAgent("trading-agent-001", config)

    # Start agent; bail out early if registration with the bridge fails.
    if not await agent.start():
        print("Failed to start trading agent")
        return

    try:
        # Run trading loop
        await agent.run_trading_loop()
    except KeyboardInterrupt:
        print("Shutting down trading agent...")
    finally:
        await agent.stop()
|
|
||||||
|
|
||||||
# Script entry point: run the trading agent's async main() until interrupted.
if __name__ == "__main__":
    asyncio.run(main())
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from .poa import PoAProposer, ProposerConfig, CircuitBreaker
|
|
||||||
|
|
||||||
__all__ = ["PoAProposer", "ProposerConfig", "CircuitBreaker"]
|
|
||||||
@@ -1,345 +0,0 @@
|
|||||||
import asyncio
|
|
||||||
import hashlib
|
|
||||||
import json
|
|
||||||
import re
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Callable, ContextManager, Optional
|
|
||||||
|
|
||||||
from sqlmodel import Session, select
|
|
||||||
|
|
||||||
from ..logger import get_logger
|
|
||||||
from ..metrics import metrics_registry
|
|
||||||
from ..config import ProposerConfig
|
|
||||||
from ..models import Block, Account
|
|
||||||
from ..gossip import gossip_broker
|
|
||||||
|
|
||||||
_METRIC_KEY_SANITIZE = re.compile(r"[^a-zA-Z0-9_]")
|
|
||||||
|
|
||||||
|
|
||||||
def _sanitize_metric_suffix(value: str) -> str:
    """Reduce *value* to a metric-name-safe suffix.

    Every character outside [A-Za-z0-9_] becomes '_', leading/trailing
    underscores are trimmed, and "unknown" is returned when nothing remains.
    """
    cleaned = _METRIC_KEY_SANITIZE.sub("_", value).strip("_")
    if cleaned:
        return cleaned
    return "unknown"
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
import time
|
|
||||||
|
|
||||||
class CircuitBreaker:
    """Failure-counting circuit breaker with closed/open/half-open states.

    Opens after ``threshold`` accumulated failures; after ``timeout``
    seconds in the open state it transitions to half-open, where probe
    requests are allowed through again. A success closes the breaker and
    resets the failure count.
    """

    def __init__(self, threshold: int, timeout: int):
        self._threshold = threshold          # failures needed to trip open
        self._timeout = timeout              # seconds before open -> half-open
        self._failures = 0
        self._last_failure_time = 0.0
        self._state = "closed"

    @property
    def state(self) -> str:
        """Current state, lazily promoting open -> half-open once the timeout elapses."""
        if self._state == "open" and time.time() - self._last_failure_time > self._timeout:
            self._state = "half-open"
        return self._state

    def allow_request(self) -> bool:
        """Return True unless the breaker is (still) open."""
        return self.state in ("closed", "half-open")

    def record_failure(self) -> None:
        """Count a failure; trip the breaker open once the threshold is reached."""
        self._failures += 1
        self._last_failure_time = time.time()
        if self._failures >= self._threshold:
            self._state = "open"

    def record_success(self) -> None:
        """Reset the failure count and close the breaker."""
        self._failures = 0
        self._state = "closed"
|
|
||||||
|
|
||||||
class PoAProposer:
    """Proof-of-Authority block proposer.

    Responsible for periodically proposing blocks if this node is configured as a proposer.
    In the real implementation, this would involve checking the mempool, validating transactions,
    and signing the block.
    """

    def __init__(
        self,
        *,
        config: ProposerConfig,
        session_factory: Callable[[], ContextManager[Session]],
    ) -> None:
        # Static proposer configuration (chain id, slot interval, block limits, identity).
        self._config = config
        # Factory producing DB sessions as context managers.
        self._session_factory = session_factory
        self._logger = get_logger(__name__)
        # Signals the run loop to exit.
        self._stop_event = asyncio.Event()
        # Background task running _run_loop; None while stopped.
        self._task: Optional[asyncio.Task[None]] = None
        # Remembers who proposed the previous block, to count proposer switches.
        self._last_proposer_id: Optional[str] = None

    async def start(self) -> None:
        """Start the proposer loop (idempotent); ensures a genesis block exists first."""
        if self._task is not None:
            return
        self._logger.info("Starting PoA proposer loop", extra={"interval": self._config.interval_seconds})
        await self._ensure_genesis_block()
        self._stop_event.clear()
        self._task = asyncio.create_task(self._run_loop())

    async def stop(self) -> None:
        """Signal the loop to stop and wait for the background task to finish."""
        if self._task is None:
            return
        self._logger.info("Stopping PoA proposer loop")
        self._stop_event.set()
        await self._task
        self._task = None

    async def _run_loop(self) -> None:
        # Propose one block per slot until stop() is requested; proposal
        # errors are logged and the loop continues with the next slot.
        while not self._stop_event.is_set():
            await self._wait_until_next_slot()
            if self._stop_event.is_set():
                break
            try:
                await self._propose_block()
            except Exception as exc:  # pragma: no cover - defensive logging
                self._logger.exception("Failed to propose block", extra={"error": str(exc)})

    async def _wait_until_next_slot(self) -> None:
        """Sleep until the next proposal slot, waking early when stop() is requested."""
        head = self._fetch_chain_head()
        if head is None:
            # No chain head yet: propose immediately.
            return
        now = datetime.utcnow()
        elapsed = (now - head.timestamp).total_seconds()
        # Remaining time in the current slot; never wait less than 100 ms.
        sleep_for = max(self._config.interval_seconds - elapsed, 0.1)
        if sleep_for <= 0:
            sleep_for = 0.1
        try:
            # The stop event doubles as an early-wakeup signal.
            await asyncio.wait_for(self._stop_event.wait(), timeout=sleep_for)
        except asyncio.TimeoutError:
            return

    async def _propose_block(self) -> None:
        """Drain the mempool, apply balance transfers, persist and broadcast a new block."""
        # Check internal mempool and include transactions
        from ..mempool import get_mempool
        from ..models import Transaction, Account
        mempool = get_mempool()

        with self._session_factory() as session:
            # Current chain head for this chain (highest block).
            head = session.exec(select(Block).where(Block.chain_id == self._config.chain_id).order_by(Block.height.desc()).limit(1)).first()
            next_height = 0
            parent_hash = "0x00"
            interval_seconds: Optional[float] = None
            if head is not None:
                next_height = head.height + 1
                parent_hash = head.hash
                interval_seconds = (datetime.utcnow() - head.timestamp).total_seconds()

            timestamp = datetime.utcnow()

            # Pull transactions from mempool (bounded by count and size limits).
            max_txs = self._config.max_txs_per_block
            max_bytes = self._config.max_block_size_bytes
            pending_txs = mempool.drain(max_txs, max_bytes, self._config.chain_id)
            self._logger.info(f"[PROPOSE] drained {len(pending_txs)} txs from mempool, chain={self._config.chain_id}")

            # Process transactions and update balances; invalid transactions
            # are skipped (NOTE(review): skipped txs are dropped, not
            # returned to the mempool — confirm this is intended).
            processed_txs = []
            for tx in pending_txs:
                try:
                    # Parse transaction data
                    tx_data = tx.content
                    sender = tx_data.get("from")
                    recipient = tx_data.get("to")
                    value = tx_data.get("amount", 0)
                    fee = tx_data.get("fee", 0)

                    if not sender or not recipient:
                        continue

                    # Get sender account; unknown senders cannot pay, so skip.
                    sender_account = session.get(Account, (self._config.chain_id, sender))
                    if not sender_account:
                        continue

                    # Check sufficient balance (value + fee).
                    total_cost = value + fee
                    if sender_account.balance < total_cost:
                        continue

                    # Get or create recipient account
                    recipient_account = session.get(Account, (self._config.chain_id, recipient))
                    if not recipient_account:
                        recipient_account = Account(chain_id=self._config.chain_id, address=recipient, balance=0, nonce=0)
                        session.add(recipient_account)
                        session.flush()

                    # Update balances (the fee is deducted but not credited anywhere here).
                    sender_account.balance -= total_cost
                    sender_account.nonce += 1
                    recipient_account.balance += value

                    # Create transaction record, confirmed at the height being proposed.
                    transaction = Transaction(
                        chain_id=self._config.chain_id,
                        tx_hash=tx.tx_hash,
                        sender=sender,
                        recipient=recipient,
                        payload=tx_data,
                        value=value,
                        fee=fee,
                        nonce=sender_account.nonce - 1,
                        timestamp=timestamp,
                        block_height=next_height,
                        status="confirmed"
                    )
                    session.add(transaction)
                    processed_txs.append(tx)

                except Exception as e:
                    self._logger.warning(f"Failed to process transaction {tx.tx_hash}: {e}")
                    continue

            # Compute block hash with transaction data
            block_hash = self._compute_block_hash(next_height, parent_hash, timestamp, processed_txs)

            block = Block(
                chain_id=self._config.chain_id,
                height=next_height,
                hash=block_hash,
                parent_hash=parent_hash,
                proposer=self._config.proposer_id,
                timestamp=timestamp,
                tx_count=len(processed_txs),
                state_root=None,
            )
            session.add(block)
            session.commit()

            # Metrics: block count, head height, and slot interval observations.
            metrics_registry.increment("blocks_proposed_total")
            metrics_registry.set_gauge("chain_head_height", float(next_height))
            if interval_seconds is not None and interval_seconds >= 0:
                metrics_registry.observe("block_interval_seconds", interval_seconds)
                metrics_registry.set_gauge("poa_last_block_interval_seconds", float(interval_seconds))

            # Per-proposer counter plus a switch counter when the identity changes.
            proposer_suffix = _sanitize_metric_suffix(self._config.proposer_id)
            metrics_registry.increment(f"poa_blocks_proposed_total_{proposer_suffix}")
            if self._last_proposer_id is not None and self._last_proposer_id != self._config.proposer_id:
                metrics_registry.increment("poa_proposer_switches_total")
            self._last_proposer_id = self._config.proposer_id

            self._logger.info(
                "Proposed block",
                extra={
                    "height": block.height,
                    "hash": block.hash,
                    "proposer": block.proposer,
                },
            )

            # Broadcast the new block (including full transaction payloads) to peers.
            tx_list = [tx.content for tx in processed_txs] if processed_txs else []
            await gossip_broker.publish(
                "blocks",
                {
                    "chain_id": self._config.chain_id,
                    "height": block.height,
                    "hash": block.hash,
                    "parent_hash": block.parent_hash,
                    "proposer": block.proposer,
                    "timestamp": block.timestamp.isoformat(),
                    "tx_count": block.tx_count,
                    "state_root": block.state_root,
                    "transactions": tx_list,
                },
            )

    async def _ensure_genesis_block(self) -> None:
        """Create, persist and broadcast the genesis block if the chain is empty."""
        with self._session_factory() as session:
            head = session.exec(select(Block).where(Block.chain_id == self._config.chain_id).order_by(Block.height.desc()).limit(1)).first()
            if head is not None:
                return

            # Use a deterministic genesis timestamp so all nodes agree on the genesis block hash
            timestamp = datetime(2025, 1, 1, 0, 0, 0)
            block_hash = self._compute_block_hash(0, "0x00", timestamp)
            genesis = Block(
                chain_id=self._config.chain_id,
                height=0,
                hash=block_hash,
                parent_hash="0x00",
                proposer=self._config.proposer_id,  # Use configured proposer as genesis proposer
                timestamp=timestamp,
                tx_count=0,
                state_root=None,
            )
            session.add(genesis)
            session.commit()

            # Initialize accounts from genesis allocations file (if present)
            await self._initialize_genesis_allocations(session)

            # Broadcast genesis block for initial sync
            await gossip_broker.publish(
                "blocks",
                {
                    "chain_id": self._config.chain_id,
                    "height": genesis.height,
                    "hash": genesis.hash,
                    "parent_hash": genesis.parent_hash,
                    "proposer": genesis.proposer,
                    "timestamp": genesis.timestamp.isoformat(),
                    "tx_count": genesis.tx_count,
                    "state_root": genesis.state_root,
                }
            )

    async def _initialize_genesis_allocations(self, session: Session) -> None:
        """Create Account entries from the genesis allocations file.

        Idempotent: existing accounts are left untouched. Missing file is
        logged and skipped.
        """
        # Use standardized data directory from configuration
        from ..config import settings

        genesis_paths = [
            Path(f"/var/lib/aitbc/data/{self._config.chain_id}/genesis.json"),  # Standard location
        ]

        genesis_path = None
        for path in genesis_paths:
            if path.exists():
                genesis_path = path
                break

        if not genesis_path:
            self._logger.warning("Genesis allocations file not found; skipping account initialization", extra={"paths": str(genesis_paths)})
            return

        with open(genesis_path) as f:
            genesis_data = json.load(f)

        allocations = genesis_data.get("allocations", [])
        created = 0
        for alloc in allocations:
            addr = alloc["address"]
            balance = int(alloc["balance"])
            nonce = int(alloc.get("nonce", 0))
            # Check if account already exists (idempotent)
            acct = session.get(Account, (self._config.chain_id, addr))
            if acct is None:
                acct = Account(chain_id=self._config.chain_id, address=addr, balance=balance, nonce=nonce)
                session.add(acct)
                created += 1
        session.commit()
        self._logger.info("Initialized genesis accounts", extra={"count": created, "total": len(allocations), "path": str(genesis_path)})

    def _fetch_chain_head(self) -> Optional[Block]:
        # NOTE(review): unlike the other queries, this one is NOT filtered by
        # chain_id — confirm whether the head of any chain is acceptable here.
        with self._session_factory() as session:
            return session.exec(select(Block).order_by(Block.height.desc()).limit(1)).first()

    def _compute_block_hash(self, height: int, parent_hash: str, timestamp: datetime, transactions: Optional[list] = None) -> str:
        """Deterministically hash the block header fields plus sorted tx hashes."""
        # Include transaction hashes in block hash computation
        tx_hashes = []
        if transactions:
            tx_hashes = [tx.tx_hash for tx in transactions]

        # Sorted tx hashes keep the digest independent of mempool drain order.
        payload = f"{self._config.chain_id}|{height}|{parent_hash}|{timestamp.isoformat()}|{'|'.join(sorted(tx_hashes))}".encode()
        return "0x" + hashlib.sha256(payload).hexdigest()
|
|
||||||
@@ -1,229 +0,0 @@
|
|||||||
import asyncio
|
|
||||||
import hashlib
|
|
||||||
import re
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Callable, ContextManager, Optional
|
|
||||||
|
|
||||||
from sqlmodel import Session, select
|
|
||||||
|
|
||||||
from ..logger import get_logger
|
|
||||||
from ..metrics import metrics_registry
|
|
||||||
from ..config import ProposerConfig
|
|
||||||
from ..models import Block
|
|
||||||
from ..gossip import gossip_broker
|
|
||||||
|
|
||||||
# Characters that may not appear in a metric name suffix.
_METRIC_KEY_SANITIZE = re.compile(r"[^a-zA-Z0-9_]")


def _sanitize_metric_suffix(value: str) -> str:
    """Normalize *value* into a metric-name-safe suffix.

    Every character outside ``[a-zA-Z0-9_]`` becomes an underscore; leading
    and trailing underscores are trimmed. An empty result falls back to
    "unknown".
    """
    cleaned = _METRIC_KEY_SANITIZE.sub("_", value).strip("_")
    if not cleaned:
        return "unknown"
    return cleaned
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
import time
|
|
||||||
|
|
||||||
class CircuitBreaker:
    """Three-state circuit breaker (closed -> open -> half-open).

    After ``threshold`` consecutive failures the breaker opens and rejects
    requests. Once ``timeout`` seconds have elapsed since the last failure the
    breaker moves to half-open and lets probe requests through; a success
    closes it again.
    """

    def __init__(self, threshold: int, timeout: int):
        self._threshold = threshold
        self._timeout = timeout
        self._failures = 0
        self._last_failure_time = 0.0
        self._state = "closed"

    @property
    def state(self) -> str:
        """Current state, promoting "open" to "half-open" once the timeout passes."""
        timed_out = (time.time() - self._last_failure_time) > self._timeout
        if self._state == "open" and timed_out:
            self._state = "half-open"
        return self._state

    def allow_request(self) -> bool:
        """Whether a request may proceed in the current state."""
        return self.state in ("closed", "half-open")

    def record_failure(self) -> None:
        """Count a failure; open the breaker once the threshold is reached."""
        self._failures += 1
        self._last_failure_time = time.time()
        if self._failures >= self._threshold:
            self._state = "open"

    def record_success(self) -> None:
        """Reset the failure count and close the breaker."""
        self._failures = 0
        self._state = "closed"
|
|
||||||
|
|
||||||
class PoAProposer:
    """Proof-of-Authority block proposer.

    Responsible for periodically proposing blocks if this node is configured
    as a proposer. In the real implementation, this would involve checking the
    mempool, validating transactions, and signing the block.
    """

    def __init__(
        self,
        *,
        config: "ProposerConfig",
        session_factory: "Callable[[], ContextManager[Session]]",
    ) -> None:
        self._config = config
        self._session_factory = session_factory
        self._logger = get_logger(__name__)
        self._stop_event = asyncio.Event()
        self._task: Optional[asyncio.Task[None]] = None
        # Proposer id of the previously proposed block; used to count
        # proposer switches in metrics.
        self._last_proposer_id: Optional[str] = None

    async def start(self) -> None:
        """Create the genesis block if needed and start the proposer loop.

        Idempotent: calling start() while the loop is already running is a
        no-op.
        """
        if self._task is not None:
            return
        self._logger.info("Starting PoA proposer loop", extra={"interval": self._config.interval_seconds})
        # BUG FIX: _ensure_genesis_block is a coroutine; it was previously
        # called without await, so the genesis block (and its gossip
        # broadcast) never actually happened.
        await self._ensure_genesis_block()
        self._stop_event.clear()
        self._task = asyncio.create_task(self._run_loop())

    async def stop(self) -> None:
        """Signal the run loop to stop and wait for the task to finish."""
        if self._task is None:
            return
        self._logger.info("Stopping PoA proposer loop")
        self._stop_event.set()
        await self._task
        self._task = None

    async def _run_loop(self) -> None:
        """Propose one block per slot until stop() is requested."""
        while not self._stop_event.is_set():
            await self._wait_until_next_slot()
            if self._stop_event.is_set():
                break
            try:
                # BUG FIX: _propose_block is async; calling it without await
                # produced a coroutine object that never executed, so no
                # blocks were ever proposed.
                await self._propose_block()
            except Exception as exc:  # pragma: no cover - defensive logging
                self._logger.exception("Failed to propose block", extra={"error": str(exc)})

    async def _wait_until_next_slot(self) -> None:
        """Sleep until the next proposal slot or until stop() is requested."""
        head = self._fetch_chain_head()
        if head is None:
            # No chain yet: propose immediately.
            return
        now = datetime.utcnow()
        elapsed = (now - head.timestamp).total_seconds()
        # Clamp to a small positive floor so we never busy-spin even when the
        # slot deadline has already passed. (The old follow-up `if sleep_for
        # <= 0` branch was unreachable and has been removed.)
        sleep_for = max(self._config.interval_seconds - elapsed, 0.1)
        try:
            # Wake early if stop() sets the event while we wait.
            await asyncio.wait_for(self._stop_event.wait(), timeout=sleep_for)
        except asyncio.TimeoutError:
            return

    async def _propose_block(self) -> None:
        """Create, persist, and broadcast the next block for this chain."""
        # Skip the slot entirely when there is nothing in the mempool.
        from ..mempool import get_mempool

        if get_mempool().size(self._config.chain_id) == 0:
            return

        with self._session_factory() as session:
            head = session.exec(
                select(Block)
                .where(Block.chain_id == self._config.chain_id)
                .order_by(Block.height.desc())
                .limit(1)
            ).first()
            next_height = 0
            parent_hash = "0x00"
            interval_seconds: Optional[float] = None
            if head is not None:
                next_height = head.height + 1
                parent_hash = head.hash
                interval_seconds = (datetime.utcnow() - head.timestamp).total_seconds()

            timestamp = datetime.utcnow()
            block_hash = self._compute_block_hash(next_height, parent_hash, timestamp)

            # NOTE(review): tx_count is recorded as 0 even though we only
            # propose when the mempool is non-empty; transactions are not yet
            # pulled into the block here.
            block = Block(
                chain_id=self._config.chain_id,
                height=next_height,
                hash=block_hash,
                parent_hash=parent_hash,
                proposer=self._config.proposer_id,
                timestamp=timestamp,
                tx_count=0,
                state_root=None,
            )
            session.add(block)
            session.commit()

            metrics_registry.increment("blocks_proposed_total")
            metrics_registry.set_gauge("chain_head_height", float(next_height))
            if interval_seconds is not None and interval_seconds >= 0:
                metrics_registry.observe("block_interval_seconds", interval_seconds)
                metrics_registry.set_gauge("poa_last_block_interval_seconds", float(interval_seconds))

            proposer_suffix = _sanitize_metric_suffix(self._config.proposer_id)
            metrics_registry.increment(f"poa_blocks_proposed_total_{proposer_suffix}")
            if self._last_proposer_id is not None and self._last_proposer_id != self._config.proposer_id:
                metrics_registry.increment("poa_proposer_switches_total")
            self._last_proposer_id = self._config.proposer_id

            self._logger.info(
                "Proposed block",
                extra={
                    "height": block.height,
                    "hash": block.hash,
                    "proposer": block.proposer,
                },
            )

            # Broadcast the new block to peers.
            await gossip_broker.publish(
                "blocks",
                {
                    "height": block.height,
                    "hash": block.hash,
                    "parent_hash": block.parent_hash,
                    "proposer": block.proposer,
                    "timestamp": block.timestamp.isoformat(),
                    "tx_count": block.tx_count,
                    "state_root": block.state_root,
                }
            )

    async def _ensure_genesis_block(self) -> None:
        """Create and broadcast the genesis block when the chain is empty."""
        with self._session_factory() as session:
            head = session.exec(
                select(Block)
                .where(Block.chain_id == self._config.chain_id)
                .order_by(Block.height.desc())
                .limit(1)
            ).first()
            if head is not None:
                return

            # Use a deterministic genesis timestamp so all nodes agree on the
            # genesis block hash.
            timestamp = datetime(2025, 1, 1, 0, 0, 0)
            block_hash = self._compute_block_hash(0, "0x00", timestamp)
            genesis = Block(
                chain_id=self._config.chain_id,
                height=0,
                hash=block_hash,
                parent_hash="0x00",
                proposer="genesis",
                timestamp=timestamp,
                tx_count=0,
                state_root=None,
            )
            session.add(genesis)
            session.commit()

            # Broadcast genesis block for initial sync.
            await gossip_broker.publish(
                "blocks",
                {
                    "height": genesis.height,
                    "hash": genesis.hash,
                    "parent_hash": genesis.parent_hash,
                    "proposer": genesis.proposer,
                    "timestamp": genesis.timestamp.isoformat(),
                    "tx_count": genesis.tx_count,
                    "state_root": genesis.state_root,
                }
            )

    def _fetch_chain_head(self) -> "Optional[Block]":
        """Return the highest block for this chain, or None when empty.

        CONSISTENCY FIX: filter by chain_id like every other query in this
        class; previously the head of *any* chain stored in the database
        could be returned.
        """
        with self._session_factory() as session:
            return session.exec(
                select(Block)
                .where(Block.chain_id == self._config.chain_id)
                .order_by(Block.height.desc())
                .limit(1)
            ).first()

    def _compute_block_hash(self, height: int, parent_hash: str, timestamp: datetime) -> str:
        """Derive the deterministic block hash from chain id, height, parent
        hash, and timestamp."""
        payload = f"{self._config.chain_id}|{height}|{parent_hash}|{timestamp.isoformat()}".encode()
        return "0x" + hashlib.sha256(payload).hexdigest()
|
|
||||||
@@ -1,11 +0,0 @@
|
|||||||
--- apps/blockchain-node/src/aitbc_chain/consensus/poa.py
|
|
||||||
+++ apps/blockchain-node/src/aitbc_chain/consensus/poa.py
|
|
||||||
@@ -101,7 +101,7 @@
|
|
||||||
# Wait for interval before proposing next block
|
|
||||||
await asyncio.sleep(self.config.interval_seconds)
|
|
||||||
|
|
||||||
- self._propose_block()
|
|
||||||
+ await self._propose_block()
|
|
||||||
|
|
||||||
except asyncio.CancelledError:
|
|
||||||
pass
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from .poa import PoAProposer, ProposerConfig, CircuitBreaker
|
|
||||||
|
|
||||||
__all__ = ["PoAProposer", "ProposerConfig", "CircuitBreaker"]
|
|
||||||
@@ -1,210 +0,0 @@
|
|||||||
"""
|
|
||||||
Validator Key Management
|
|
||||||
Handles cryptographic key operations for validators
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
import os
import time
from dataclasses import dataclass
from typing import Dict, Optional, Tuple

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import padding, rsa
from cryptography.hazmat.primitives.serialization import Encoding, NoEncryption, PrivateFormat
|
|
||||||
|
|
||||||
@dataclass
class ValidatorKeyPair:
    # A validator's RSA key material plus rotation bookkeeping.
    # NOTE(review): @dataclass is used but `dataclass` is not imported at the
    # top of this file — confirm the import exists.
    address: str  # validator address this key pair belongs to
    private_key_pem: str  # PEM-encoded private key (unencrypted PKCS#8)
    public_key_pem: str  # PEM-encoded SubjectPublicKeyInfo public key
    created_at: float  # Unix timestamp of initial generation
    last_rotated: float  # Unix timestamp of most recent rotation
|
|
||||||
|
|
||||||
class KeyManager:
    """Manages validator cryptographic keys.

    Key pairs are held in memory and persisted as PEM strings in a JSON file
    under ``keys_dir``; the directory and file are created with owner-only
    permissions (0700 / 0600).
    """

    def __init__(self, keys_dir: str = "/opt/aitbc/keys"):
        self.keys_dir = keys_dir
        # address -> ValidatorKeyPair
        self.key_pairs: Dict[str, "ValidatorKeyPair"] = {}
        self._ensure_keys_directory()
        self._load_existing_keys()

    def _ensure_keys_directory(self):
        """Ensure keys directory exists and has owner-only permissions."""
        os.makedirs(self.keys_dir, mode=0o700, exist_ok=True)

    def _load_existing_keys(self):
        """Load existing key pairs from disk (best-effort)."""
        keys_file = os.path.join(self.keys_dir, "validator_keys.json")
        if not os.path.exists(keys_file):
            return
        try:
            with open(keys_file, 'r') as f:
                keys_data = json.load(f)
            for address, key_data in keys_data.items():
                self.key_pairs[address] = ValidatorKeyPair(
                    address=address,
                    private_key_pem=key_data['private_key_pem'],
                    public_key_pem=key_data['public_key_pem'],
                    created_at=key_data['created_at'],
                    last_rotated=key_data['last_rotated']
                )
        except Exception as e:
            # Deliberately best-effort: a corrupt file leaves the manager empty.
            print(f"Error loading keys: {e}")

    def generate_key_pair(self, address: str) -> "ValidatorKeyPair":
        """Generate, store, and persist a new 2048-bit RSA key pair."""
        private_key = rsa.generate_private_key(
            public_exponent=65537,
            key_size=2048,
            backend=default_backend()
        )
        # Serialize private key (unencrypted PKCS#8 PEM).
        private_key_pem = private_key.private_bytes(
            encoding=Encoding.PEM,
            format=PrivateFormat.PKCS8,
            encryption_algorithm=NoEncryption()
        ).decode('utf-8')
        # Serialize the matching public key.
        public_key_pem = private_key.public_key().public_bytes(
            encoding=Encoding.PEM,
            format=serialization.PublicFormat.SubjectPublicKeyInfo
        ).decode('utf-8')

        current_time = time.time()
        key_pair = ValidatorKeyPair(
            address=address,
            private_key_pem=private_key_pem,
            public_key_pem=public_key_pem,
            created_at=current_time,
            last_rotated=current_time
        )
        self.key_pairs[address] = key_pair
        self._save_keys()
        return key_pair

    def get_key_pair(self, address: str) -> "Optional[ValidatorKeyPair]":
        """Get the key pair for a validator, or None if unknown."""
        return self.key_pairs.get(address)

    def rotate_key(self, address: str) -> "Optional[ValidatorKeyPair]":
        """Rotate a validator's keys, preserving the original creation time.

        Returns None when the address has no existing key pair.
        """
        if address not in self.key_pairs:
            return None
        # BUG FIX: capture created_at BEFORE generate_key_pair replaces the
        # entry; the old code read it afterwards, so the original creation
        # time was silently lost on every rotation.
        original_created_at = self.key_pairs[address].created_at
        new_key_pair = self.generate_key_pair(address)
        new_key_pair.created_at = original_created_at
        new_key_pair.last_rotated = time.time()
        self._save_keys()
        return new_key_pair

    def sign_message(self, address: str, message: str) -> Optional[str]:
        """Sign a message with the validator's private key.

        Returns the hex-encoded PKCS#1 v1.5 / SHA-256 signature, or None for
        an unknown address or on failure.
        """
        key_pair = self.get_key_pair(address)
        if not key_pair:
            return None
        try:
            private_key = serialization.load_pem_private_key(
                key_pair.private_key_pem.encode(),
                password=None,
                backend=default_backend()
            )
            # BUG FIX: RSAPrivateKey.sign() takes (data, padding, algorithm).
            # The old call passed hashes.SHA256() as the padding and
            # default_backend() as the algorithm, which raised a TypeError and
            # made signing always return None.
            signature = private_key.sign(
                message.encode('utf-8'),
                padding.PKCS1v15(),
                hashes.SHA256()
            )
            return signature.hex()
        except Exception as e:
            print(f"Error signing message: {e}")
            return None

    def verify_signature(self, address: str, message: str, signature: str) -> bool:
        """Verify a hex-encoded signature produced by sign_message."""
        key_pair = self.get_key_pair(address)
        if not key_pair:
            return False
        try:
            public_key = serialization.load_pem_public_key(
                key_pair.public_key_pem.encode(),
                backend=default_backend()
            )
            # BUG FIX: RSAPublicKey.verify() takes (signature, data, padding,
            # algorithm); the padding argument was missing in the old call.
            public_key.verify(
                bytes.fromhex(signature),
                message.encode('utf-8'),
                padding.PKCS1v15(),
                hashes.SHA256()
            )
            return True
        except Exception as e:
            print(f"Error verifying signature: {e}")
            return False

    def get_public_key_pem(self, address: str) -> Optional[str]:
        """Get the public key PEM for a validator, or None if unknown."""
        key_pair = self.get_key_pair(address)
        return key_pair.public_key_pem if key_pair else None

    def _save_keys(self):
        """Persist all key pairs to disk with owner-only file permissions."""
        keys_file = os.path.join(self.keys_dir, "validator_keys.json")
        keys_data = {
            address: {
                'private_key_pem': key_pair.private_key_pem,
                'public_key_pem': key_pair.public_key_pem,
                'created_at': key_pair.created_at,
                'last_rotated': key_pair.last_rotated,
            }
            for address, key_pair in self.key_pairs.items()
        }
        try:
            with open(keys_file, 'w') as f:
                json.dump(keys_data, f, indent=2)
            # Private key material: restrict the file to owner read/write.
            os.chmod(keys_file, 0o600)
        except Exception as e:
            print(f"Error saving keys: {e}")

    def should_rotate_key(self, address: str, rotation_interval: int = 86400) -> bool:
        """Check if the key should be rotated (default interval: 24 hours).

        Unknown addresses report True so callers generate a key.
        """
        key_pair = self.get_key_pair(address)
        if not key_pair:
            return True
        return (time.time() - key_pair.last_rotated) >= rotation_interval

    def get_key_age(self, address: str) -> Optional[float]:
        """Age of the key in seconds since creation, or None if unknown."""
        key_pair = self.get_key_pair(address)
        if not key_pair:
            return None
        return time.time() - key_pair.created_at
|
|
||||||
|
|
||||||
# Module-level singleton key manager.
# NOTE(review): instantiating here runs _ensure_keys_directory() at import
# time (creates /opt/aitbc/keys) — confirm this side effect is intended for
# every importer of this module.
key_manager = KeyManager()
|
|
||||||
@@ -1,119 +0,0 @@
|
|||||||
"""
|
|
||||||
Multi-Validator Proof of Authority Consensus Implementation
|
|
||||||
Extends single validator PoA to support multiple validators with rotation
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import time
|
|
||||||
import hashlib
|
|
||||||
from typing import List, Dict, Optional, Set
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
from ..config import settings
|
|
||||||
from ..models import Block, Transaction
|
|
||||||
from ..database import session_scope
|
|
||||||
|
|
||||||
class ValidatorRole(Enum):
    """Role a validator currently plays in consensus."""

    PROPOSER = "proposer"
    VALIDATOR = "validator"
    STANDBY = "standby"


@dataclass
class Validator:
    """Per-validator consensus state."""

    address: str
    stake: float
    reputation: float
    role: ValidatorRole
    last_proposed: int
    is_active: bool


class MultiValidatorPoA:
    """Multi-Validator Proof of Authority consensus mechanism."""

    def __init__(self, chain_id: str):
        self.chain_id = chain_id
        self.validators: Dict[str, Validator] = {}
        self.current_proposer_index = 0
        self.round_robin_enabled = True
        self.consensus_timeout = 30  # seconds

    def add_validator(self, address: str, stake: float = 1000.0) -> bool:
        """Register a new validator; False if the address already exists."""
        if address in self.validators:
            return False
        self.validators[address] = Validator(
            address=address,
            stake=stake,
            reputation=1.0,
            role=ValidatorRole.STANDBY,
            last_proposed=0,
            is_active=True,
        )
        return True

    def remove_validator(self, address: str) -> bool:
        """Deactivate a validator; False when the address is unknown."""
        validator = self.validators.get(address)
        if validator is None:
            return False
        validator.is_active = False
        validator.role = ValidatorRole.STANDBY
        return True

    def _eligible_validators(self) -> "List[Validator]":
        """Active validators whose role permits consensus participation."""
        participating_roles = (ValidatorRole.PROPOSER, ValidatorRole.VALIDATOR)
        return [
            validator
            for validator in self.validators.values()
            if validator.is_active and validator.role in participating_roles
        ]

    def select_proposer(self, block_height: int) -> Optional[str]:
        """Round-robin proposer selection for *block_height*, or None."""
        candidates = self._eligible_validators()
        if not candidates:
            return None
        return candidates[block_height % len(candidates)].address

    def validate_block(self, block: "Block", proposer: str) -> bool:
        """Check that *proposer* is a known, active, eligible validator."""
        validator = self.validators.get(proposer)
        if validator is None or not validator.is_active:
            return False
        # Only proposers/validators may propose blocks.
        if validator.role not in (ValidatorRole.PROPOSER, ValidatorRole.VALIDATOR):
            return False
        # Additional validation logic here
        return True

    def get_consensus_participants(self) -> List[str]:
        """Addresses of all active consensus participants."""
        return [validator.address for validator in self._eligible_validators()]

    def update_validator_reputation(self, address: str, delta: float) -> bool:
        """Adjust reputation by *delta*, clamped to [0.0, 1.0]."""
        validator = self.validators.get(address)
        if validator is None:
            return False
        validator.reputation = max(0.0, min(1.0, validator.reputation + delta))
        return True


# Registry of per-chain consensus instances.
consensus_instances: Dict[str, "MultiValidatorPoA"] = {}


def get_consensus(chain_id: str) -> "MultiValidatorPoA":
    """Get or create the consensus instance for *chain_id*."""
    if chain_id not in consensus_instances:
        consensus_instances[chain_id] = MultiValidatorPoA(chain_id)
    return consensus_instances[chain_id]
|
|
||||||
@@ -1,193 +0,0 @@
|
|||||||
"""
|
|
||||||
Practical Byzantine Fault Tolerance (PBFT) Consensus Implementation
|
|
||||||
Provides Byzantine fault tolerance for up to 1/3 faulty validators
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import time
|
|
||||||
import hashlib
|
|
||||||
from typing import List, Dict, Optional, Set, Tuple
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
from .multi_validator_poa import MultiValidatorPoA, Validator
|
|
||||||
|
|
||||||
class PBFTPhase(Enum):
    """Phases of a PBFT round."""

    PRE_PREPARE = "pre_prepare"
    PREPARE = "prepare"
    COMMIT = "commit"
    EXECUTE = "execute"


class PBFTMessageType(Enum):
    """Wire-level PBFT message types."""

    PRE_PREPARE = "pre_prepare"
    PREPARE = "prepare"
    COMMIT = "commit"
    VIEW_CHANGE = "view_change"


@dataclass
class PBFTMessage:
    """A single PBFT protocol message."""

    message_type: PBFTMessageType
    sender: str
    view_number: int
    sequence_number: int
    digest: str
    signature: str
    timestamp: float


@dataclass
class PBFTState:
    """Mutable per-node PBFT bookkeeping, keyed by "sequence:view" slots."""

    current_view: int
    current_sequence: int
    prepared_messages: Dict[str, List[PBFTMessage]]
    committed_messages: Dict[str, List[PBFTMessage]]
    pre_prepare_messages: Dict[str, PBFTMessage]


class PBFTConsensus:
    """PBFT consensus implementation."""

    def __init__(self, consensus: "MultiValidatorPoA"):
        self.consensus = consensus
        self.state = PBFTState(
            current_view=0,
            current_sequence=0,
            prepared_messages={},
            committed_messages={},
            pre_prepare_messages={},
        )
        # f = floor(n / 3), at least 1; a quorum is 2f + 1 messages.
        participant_count = len(consensus.get_consensus_participants())
        self.fault_tolerance = max(1, participant_count // 3)
        self.required_messages = 2 * self.fault_tolerance + 1

    def get_message_digest(self, block_hash: str, sequence: int, view: int) -> str:
        """Digest binding a block hash to a (sequence, view) slot."""
        return hashlib.sha256(f"{block_hash}:{sequence}:{view}".encode()).hexdigest()

    async def pre_prepare_phase(self, proposer: str, block_hash: str) -> bool:
        """Phase 1: the proposer announces a block for the next sequence."""
        next_sequence = self.state.current_sequence + 1
        view = self.state.current_view
        message = PBFTMessage(
            message_type=PBFTMessageType.PRE_PREPARE,
            sender=proposer,
            view_number=view,
            sequence_number=next_sequence,
            digest=self.get_message_digest(block_hash, next_sequence, view),
            signature="",  # Would be signed in real implementation
            timestamp=time.time(),
        )
        self.state.pre_prepare_messages[f"{next_sequence}:{view}"] = message
        await self._broadcast_message(message)
        return True

    async def prepare_phase(self, validator: str, pre_prepare_msg: PBFTMessage) -> bool:
        """Phase 2: echo the pre-prepare; True once the prepare quorum is met."""
        slot = f"{pre_prepare_msg.sequence_number}:{pre_prepare_msg.view_number}"
        if slot not in self.state.pre_prepare_messages:
            return False
        prepare_msg = PBFTMessage(
            message_type=PBFTMessageType.PREPARE,
            sender=validator,
            view_number=pre_prepare_msg.view_number,
            sequence_number=pre_prepare_msg.sequence_number,
            digest=pre_prepare_msg.digest,
            signature="",  # Would be signed
            timestamp=time.time(),
        )
        self.state.prepared_messages.setdefault(slot, []).append(prepare_msg)
        await self._broadcast_message(prepare_msg)
        return len(self.state.prepared_messages[slot]) >= self.required_messages

    async def commit_phase(self, validator: str, prepare_msg: PBFTMessage) -> bool:
        """Phase 3: commit; executes the slot once the commit quorum is met."""
        slot = f"{prepare_msg.sequence_number}:{prepare_msg.view_number}"
        commit_msg = PBFTMessage(
            message_type=PBFTMessageType.COMMIT,
            sender=validator,
            view_number=prepare_msg.view_number,
            sequence_number=prepare_msg.sequence_number,
            digest=prepare_msg.digest,
            signature="",  # Would be signed
            timestamp=time.time(),
        )
        self.state.committed_messages.setdefault(slot, []).append(commit_msg)
        await self._broadcast_message(commit_msg)
        if len(self.state.committed_messages[slot]) >= self.required_messages:
            return await self.execute_phase(slot)
        return False

    async def execute_phase(self, key: str) -> bool:
        """Phase 4: advance the sequence and discard stale message state."""
        sequence, _view = (int(part) for part in key.split(':'))
        self.state.current_sequence = sequence
        self._cleanup_messages(sequence)
        return True

    async def _broadcast_message(self, message: PBFTMessage):
        """Send *message* to every participant except its sender."""
        for peer in self.consensus.get_consensus_participants():
            if peer == message.sender:
                continue
            # In real implementation, this would send over network
            await self._send_to_validator(peer, message)

    async def _send_to_validator(self, validator: str, message: PBFTMessage):
        """Send message to a specific validator (network layer placeholder)."""
        pass

    def _cleanup_messages(self, sequence: int):
        """Drop message state for slots older than *sequence*."""
        stale_slots = [
            slot for slot in self.state.prepared_messages
            if int(slot.split(':')[0]) < sequence
        ]
        for slot in stale_slots:
            self.state.prepared_messages.pop(slot, None)
            self.state.committed_messages.pop(slot, None)
            self.state.pre_prepare_messages.pop(slot, None)

    def handle_view_change(self, new_view: int) -> bool:
        """Move to *new_view* and reset all per-slot message state."""
        self.state.current_view = new_view
        self.state.prepared_messages.clear()
        self.state.committed_messages.clear()
        self.state.pre_prepare_messages.clear()
        return True
|
|
||||||
@@ -1,345 +0,0 @@
|
|||||||
import asyncio
|
|
||||||
import hashlib
|
|
||||||
import json
|
|
||||||
import re
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Callable, ContextManager, Optional
|
|
||||||
|
|
||||||
from sqlmodel import Session, select
|
|
||||||
|
|
||||||
from ..logger import get_logger
|
|
||||||
from ..metrics import metrics_registry
|
|
||||||
from ..config import ProposerConfig
|
|
||||||
from ..models import Block, Account
|
|
||||||
from ..gossip import gossip_broker
|
|
||||||
|
|
||||||
# Characters that may not appear in a metric name suffix.
_METRIC_KEY_SANITIZE = re.compile(r"[^a-zA-Z0-9_]")


def _sanitize_metric_suffix(value: str) -> str:
    """Normalize *value* into a metric-name-safe suffix.

    Characters outside ``[a-zA-Z0-9_]`` become underscores; leading/trailing
    underscores are trimmed, and an empty result becomes "unknown".
    """
    cleaned = _METRIC_KEY_SANITIZE.sub("_", value).strip("_")
    return cleaned if cleaned else "unknown"
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
import time
|
|
||||||
|
|
||||||
class CircuitBreaker:
    """Three-state circuit breaker (closed -> open -> half-open).

    Opens after ``threshold`` consecutive failures; after ``timeout`` seconds
    without a new failure it becomes half-open and admits probe requests. A
    success closes it again.
    """

    def __init__(self, threshold: int, timeout: int):
        self._threshold = threshold
        self._timeout = timeout
        self._failures = 0
        self._last_failure_time = 0.0
        self._state = "closed"

    @property
    def state(self) -> str:
        """Current state; an expired "open" state decays to "half-open"."""
        if self._state == "open":
            elapsed = time.time() - self._last_failure_time
            if elapsed > self._timeout:
                self._state = "half-open"
        return self._state

    def allow_request(self) -> bool:
        """True while the breaker is closed or half-open."""
        return self.state != "open"

    def record_failure(self) -> None:
        """Register a failure, opening the breaker at the threshold."""
        self._failures += 1
        self._last_failure_time = time.time()
        if self._failures >= self._threshold:
            self._state = "open"

    def record_success(self) -> None:
        """Clear failures and close the breaker."""
        self._failures = 0
        self._state = "closed"
|
|
||||||
|
|
||||||
class PoAProposer:
    """Proof-of-Authority block proposer.

    Periodically proposes blocks if this node is configured as a proposer:
    drains pending transactions from the local mempool, applies transfers to
    account balances, persists the block, records metrics, and gossips the
    new block to peers.
    """

    def __init__(
        self,
        *,
        config: ProposerConfig,
        session_factory: Callable[[], ContextManager[Session]],
    ) -> None:
        self._config = config
        self._session_factory = session_factory
        self._logger = get_logger(__name__)
        self._stop_event = asyncio.Event()
        self._task: Optional[asyncio.Task[None]] = None
        # Proposer of the previously proposed block, used to count switches.
        self._last_proposer_id: Optional[str] = None

    async def start(self) -> None:
        """Start the proposer loop; no-op if it is already running."""
        if self._task is not None:
            return
        self._logger.info("Starting PoA proposer loop", extra={"interval": self._config.interval_seconds})
        await self._ensure_genesis_block()
        self._stop_event.clear()
        self._task = asyncio.create_task(self._run_loop())

    async def stop(self) -> None:
        """Signal the loop to stop and wait for the background task to exit."""
        if self._task is None:
            return
        self._logger.info("Stopping PoA proposer loop")
        self._stop_event.set()
        await self._task
        self._task = None

    async def _run_loop(self) -> None:
        # One proposal per slot; a failing proposal is logged, never fatal.
        while not self._stop_event.is_set():
            await self._wait_until_next_slot()
            if self._stop_event.is_set():
                break
            try:
                await self._propose_block()
            except Exception as exc:  # pragma: no cover - defensive logging
                self._logger.exception("Failed to propose block", extra={"error": str(exc)})

    async def _wait_until_next_slot(self) -> None:
        """Wait until the configured interval has elapsed since the chain head."""
        head = self._fetch_chain_head()
        if head is None:
            return
        now = datetime.utcnow()
        elapsed = (now - head.timestamp).total_seconds()
        # Clamp to at least 0.1s so the loop cannot busy-spin when behind
        # schedule. (Removed a dead `if sleep_for <= 0` branch: max(..., 0.1)
        # already guarantees sleep_for >= 0.1.)
        sleep_for = max(self._config.interval_seconds - elapsed, 0.1)
        try:
            await asyncio.wait_for(self._stop_event.wait(), timeout=sleep_for)
        except asyncio.TimeoutError:
            return

    async def _propose_block(self) -> None:
        """Assemble, persist, and broadcast the next block for this chain."""
        # Imported lazily to avoid import cycles at module load time.
        from ..mempool import get_mempool
        from ..models import Transaction, Account
        mempool = get_mempool()

        with self._session_factory() as session:
            head = session.exec(select(Block).where(Block.chain_id == self._config.chain_id).order_by(Block.height.desc()).limit(1)).first()
            next_height = 0
            parent_hash = "0x00"
            interval_seconds: Optional[float] = None
            if head is not None:
                next_height = head.height + 1
                parent_hash = head.hash
                interval_seconds = (datetime.utcnow() - head.timestamp).total_seconds()

            timestamp = datetime.utcnow()

            # Pull transactions from mempool, bounded by count and size.
            max_txs = self._config.max_txs_per_block
            max_bytes = self._config.max_block_size_bytes
            pending_txs = mempool.drain(max_txs, max_bytes, self._config.chain_id)
            self._logger.info(f"[PROPOSE] drained {len(pending_txs)} txs from mempool, chain={self._config.chain_id}")

            # Process transactions and update balances; invalid ones are skipped.
            processed_txs = []
            for tx in pending_txs:
                try:
                    tx_data = tx.content
                    sender = tx_data.get("from")
                    recipient = tx_data.get("to")
                    value = tx_data.get("amount", 0)
                    fee = tx_data.get("fee", 0)

                    if not sender or not recipient:
                        continue

                    # Sender must already have an account on this chain.
                    sender_account = session.get(Account, (self._config.chain_id, sender))
                    if not sender_account:
                        continue

                    # Sender must cover value plus fee.
                    total_cost = value + fee
                    if sender_account.balance < total_cost:
                        continue

                    # Recipient account is created on first use.
                    recipient_account = session.get(Account, (self._config.chain_id, recipient))
                    if not recipient_account:
                        recipient_account = Account(chain_id=self._config.chain_id, address=recipient, balance=0, nonce=0)
                        session.add(recipient_account)
                        session.flush()

                    # Apply the transfer. NOTE(review): the fee is deducted from
                    # the sender but not credited anywhere here — confirm it is
                    # meant to be burned.
                    sender_account.balance -= total_cost
                    sender_account.nonce += 1
                    recipient_account.balance += value

                    transaction = Transaction(
                        chain_id=self._config.chain_id,
                        tx_hash=tx.tx_hash,
                        sender=sender,
                        recipient=recipient,
                        payload=tx_data,
                        value=value,
                        fee=fee,
                        nonce=sender_account.nonce - 1,  # nonce at the time the tx was applied
                        timestamp=timestamp,
                        block_height=next_height,
                        status="confirmed"
                    )
                    session.add(transaction)
                    processed_txs.append(tx)

                except Exception as e:
                    self._logger.warning(f"Failed to process transaction {tx.tx_hash}: {e}")
                    continue

            # Block hash commits to the included transaction hashes.
            block_hash = self._compute_block_hash(next_height, parent_hash, timestamp, processed_txs)

            block = Block(
                chain_id=self._config.chain_id,
                height=next_height,
                hash=block_hash,
                parent_hash=parent_hash,
                proposer=self._config.proposer_id,
                timestamp=timestamp,
                tx_count=len(processed_txs),
                state_root=None,
            )
            session.add(block)
            session.commit()

            metrics_registry.increment("blocks_proposed_total")
            metrics_registry.set_gauge("chain_head_height", float(next_height))
            if interval_seconds is not None and interval_seconds >= 0:
                metrics_registry.observe("block_interval_seconds", interval_seconds)
                metrics_registry.set_gauge("poa_last_block_interval_seconds", float(interval_seconds))

            proposer_suffix = _sanitize_metric_suffix(self._config.proposer_id)
            metrics_registry.increment(f"poa_blocks_proposed_total_{proposer_suffix}")
            if self._last_proposer_id is not None and self._last_proposer_id != self._config.proposer_id:
                metrics_registry.increment("poa_proposer_switches_total")
            self._last_proposer_id = self._config.proposer_id

            self._logger.info(
                "Proposed block",
                extra={
                    "height": block.height,
                    "hash": block.hash,
                    "proposer": block.proposer,
                },
            )

            # Broadcast the new block (with full tx payloads) to peers.
            tx_list = [tx.content for tx in processed_txs] if processed_txs else []
            await gossip_broker.publish(
                "blocks",
                {
                    "chain_id": self._config.chain_id,
                    "height": block.height,
                    "hash": block.hash,
                    "parent_hash": block.parent_hash,
                    "proposer": block.proposer,
                    "timestamp": block.timestamp.isoformat(),
                    "tx_count": block.tx_count,
                    "state_root": block.state_root,
                    "transactions": tx_list,
                },
            )

    async def _ensure_genesis_block(self) -> None:
        """Create, persist, and broadcast the genesis block if the chain is empty."""
        with self._session_factory() as session:
            head = session.exec(select(Block).where(Block.chain_id == self._config.chain_id).order_by(Block.height.desc()).limit(1)).first()
            if head is not None:
                return

            # Deterministic genesis timestamp so all nodes agree on the genesis hash.
            timestamp = datetime(2025, 1, 1, 0, 0, 0)
            block_hash = self._compute_block_hash(0, "0x00", timestamp)
            genesis = Block(
                chain_id=self._config.chain_id,
                height=0,
                hash=block_hash,
                parent_hash="0x00",
                proposer=self._config.proposer_id,  # Use configured proposer as genesis proposer
                timestamp=timestamp,
                tx_count=0,
                state_root=None,
            )
            session.add(genesis)
            session.commit()

            # Initialize accounts from genesis allocations file (if present).
            await self._initialize_genesis_allocations(session)

            # Broadcast genesis block for initial sync.
            await gossip_broker.publish(
                "blocks",
                {
                    "chain_id": self._config.chain_id,
                    "height": genesis.height,
                    "hash": genesis.hash,
                    "parent_hash": genesis.parent_hash,
                    "proposer": genesis.proposer,
                    "timestamp": genesis.timestamp.isoformat(),
                    "tx_count": genesis.tx_count,
                    "state_root": genesis.state_root,
                }
            )

    async def _initialize_genesis_allocations(self, session: Session) -> None:
        """Create Account entries from the genesis allocations file (idempotent)."""
        from ..models import Account
        # NOTE(review): the original imported `settings` from ..config but the
        # path below is hard-coded — confirm whether it should come from config.
        from ..config import settings

        genesis_paths = [
            Path(f"/var/lib/aitbc/data/{self._config.chain_id}/genesis.json"),  # Standard location
        ]

        genesis_path = None
        for path in genesis_paths:
            if path.exists():
                genesis_path = path
                break

        if not genesis_path:
            self._logger.warning("Genesis allocations file not found; skipping account initialization", extra={"paths": str(genesis_paths)})
            return

        with open(genesis_path) as f:
            genesis_data = json.load(f)

        allocations = genesis_data.get("allocations", [])
        created = 0
        for alloc in allocations:
            addr = alloc["address"]
            balance = int(alloc["balance"])
            nonce = int(alloc.get("nonce", 0))
            # Skip accounts that already exist so re-runs stay idempotent.
            acct = session.get(Account, (self._config.chain_id, addr))
            if acct is None:
                acct = Account(chain_id=self._config.chain_id, address=addr, balance=balance, nonce=nonce)
                session.add(acct)
                created += 1
        session.commit()
        self._logger.info("Initialized genesis accounts", extra={"count": created, "total": len(allocations), "path": str(genesis_path)})

    def _fetch_chain_head(self) -> Optional[Block]:
        """Return the highest block for this chain, or None when the chain is empty."""
        with self._session_factory() as session:
            # BUG FIX: filter by chain_id, consistent with every other query in
            # this class; without it a multi-chain database could return
            # another chain's head and skew the slot timing.
            return session.exec(
                select(Block)
                .where(Block.chain_id == self._config.chain_id)
                .order_by(Block.height.desc())
                .limit(1)
            ).first()

    def _compute_block_hash(self, height: int, parent_hash: str, timestamp: datetime, transactions: Optional[list] = None) -> str:
        """Deterministic hash committing to chain id, height, parent, time, and tx hashes."""
        tx_hashes = []
        if transactions:
            tx_hashes = [tx.tx_hash for tx in transactions]

        # Sorted so the hash is independent of mempool drain order.
        payload = f"{self._config.chain_id}|{height}|{parent_hash}|{timestamp.isoformat()}|{'|'.join(sorted(tx_hashes))}".encode()
        return "0x" + hashlib.sha256(payload).hexdigest()
|
|
||||||
@@ -1,229 +0,0 @@
|
|||||||
import asyncio
|
|
||||||
import hashlib
|
|
||||||
import re
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Callable, ContextManager, Optional
|
|
||||||
|
|
||||||
from sqlmodel import Session, select
|
|
||||||
|
|
||||||
from ..logger import get_logger
|
|
||||||
from ..metrics import metrics_registry
|
|
||||||
from ..config import ProposerConfig
|
|
||||||
from ..models import Block
|
|
||||||
from ..gossip import gossip_broker
|
|
||||||
|
|
||||||
_METRIC_KEY_SANITIZE = re.compile(r"[^a-zA-Z0-9_]")
|
|
||||||
|
|
||||||
|
|
||||||
def _sanitize_metric_suffix(value: str) -> str:
|
|
||||||
sanitized = _METRIC_KEY_SANITIZE.sub("_", value).strip("_")
|
|
||||||
return sanitized or "unknown"
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
import time
|
|
||||||
|
|
||||||
class CircuitBreaker:
    """Minimal three-state circuit breaker (closed / open / half-open).

    Trips open after `threshold` consecutive failures; after `timeout`
    seconds an open breaker becomes half-open, letting probe requests pass.
    """

    def __init__(self, threshold: int, timeout: int):
        self._threshold = threshold
        self._timeout = timeout
        self._failures = 0
        self._last_failure_time = 0.0
        self._state = "closed"

    @property
    def state(self) -> str:
        """Current state; lazily transitions open -> half-open after the timeout."""
        expired = time.time() - self._last_failure_time > self._timeout
        if self._state == "open" and expired:
            self._state = "half-open"
        return self._state

    def allow_request(self) -> bool:
        """Return True while requests may pass (closed or half-open)."""
        return self.state != "open"

    def record_failure(self) -> None:
        """Count one failure; trip the breaker open at the threshold."""
        self._failures += 1
        self._last_failure_time = time.time()
        if self._failures >= self._threshold:
            self._state = "open"

    def record_success(self) -> None:
        """Reset the failure counter and close the breaker."""
        self._failures = 0
        self._state = "closed"
|
|
||||||
|
|
||||||
class PoAProposer:
    """Proof-of-Authority block proposer.

    Responsible for periodically proposing blocks if this node is configured
    as a proposer. Blocks are persisted, reflected into metrics, and gossiped
    to peers.
    """

    def __init__(
        self,
        *,
        config: ProposerConfig,
        session_factory: Callable[[], ContextManager[Session]],
    ) -> None:
        self._config = config
        self._session_factory = session_factory
        self._logger = get_logger(__name__)
        self._stop_event = asyncio.Event()
        self._task: Optional[asyncio.Task[None]] = None
        # Proposer of the previously proposed block, used to count switches.
        self._last_proposer_id: Optional[str] = None

    async def start(self) -> None:
        """Start the proposer loop; no-op if it is already running."""
        if self._task is not None:
            return
        self._logger.info("Starting PoA proposer loop", extra={"interval": self._config.interval_seconds})
        # BUG FIX: _ensure_genesis_block is a coroutine; calling it without
        # awaiting never executed it, so the genesis block was never created.
        await self._ensure_genesis_block()
        self._stop_event.clear()
        self._task = asyncio.create_task(self._run_loop())

    async def stop(self) -> None:
        """Signal the loop to stop and wait for the background task to exit."""
        if self._task is None:
            return
        self._logger.info("Stopping PoA proposer loop")
        self._stop_event.set()
        await self._task
        self._task = None

    async def _run_loop(self) -> None:
        # One proposal per slot; a failing proposal is logged, never fatal.
        while not self._stop_event.is_set():
            await self._wait_until_next_slot()
            if self._stop_event.is_set():
                break
            try:
                # BUG FIX: _propose_block is a coroutine and must be awaited;
                # the bare call only created a never-run coroutine object.
                await self._propose_block()
            except Exception as exc:  # pragma: no cover - defensive logging
                self._logger.exception("Failed to propose block", extra={"error": str(exc)})

    async def _wait_until_next_slot(self) -> None:
        """Wait until the configured interval has elapsed since the chain head."""
        head = self._fetch_chain_head()
        if head is None:
            return
        now = datetime.utcnow()
        elapsed = (now - head.timestamp).total_seconds()
        # Clamp to at least 0.1s so the loop cannot busy-spin when behind
        # schedule. (Removed a dead `if sleep_for <= 0` branch: max(..., 0.1)
        # already guarantees sleep_for >= 0.1.)
        sleep_for = max(self._config.interval_seconds - elapsed, 0.1)
        try:
            await asyncio.wait_for(self._stop_event.wait(), timeout=sleep_for)
        except asyncio.TimeoutError:
            return

    async def _propose_block(self) -> None:
        """Persist and broadcast the next block when the mempool is non-empty."""
        # Skip slots with an empty mempool (lazy import avoids a cycle).
        from ..mempool import get_mempool
        if get_mempool().size(self._config.chain_id) == 0:
            return

        with self._session_factory() as session:
            head = session.exec(select(Block).where(Block.chain_id == self._config.chain_id).order_by(Block.height.desc()).limit(1)).first()
            next_height = 0
            parent_hash = "0x00"
            interval_seconds: Optional[float] = None
            if head is not None:
                next_height = head.height + 1
                parent_hash = head.hash
                interval_seconds = (datetime.utcnow() - head.timestamp).total_seconds()

            timestamp = datetime.utcnow()
            block_hash = self._compute_block_hash(next_height, parent_hash, timestamp)

            block = Block(
                chain_id=self._config.chain_id,
                height=next_height,
                hash=block_hash,
                parent_hash=parent_hash,
                proposer=self._config.proposer_id,
                timestamp=timestamp,
                tx_count=0,
                state_root=None,
            )
            session.add(block)
            session.commit()

            metrics_registry.increment("blocks_proposed_total")
            metrics_registry.set_gauge("chain_head_height", float(next_height))
            if interval_seconds is not None and interval_seconds >= 0:
                metrics_registry.observe("block_interval_seconds", interval_seconds)
                metrics_registry.set_gauge("poa_last_block_interval_seconds", float(interval_seconds))

            proposer_suffix = _sanitize_metric_suffix(self._config.proposer_id)
            metrics_registry.increment(f"poa_blocks_proposed_total_{proposer_suffix}")
            if self._last_proposer_id is not None and self._last_proposer_id != self._config.proposer_id:
                metrics_registry.increment("poa_proposer_switches_total")
            self._last_proposer_id = self._config.proposer_id

            self._logger.info(
                "Proposed block",
                extra={
                    "height": block.height,
                    "hash": block.hash,
                    "proposer": block.proposer,
                },
            )

            # Broadcast the new block
            await gossip_broker.publish(
                "blocks",
                {
                    "height": block.height,
                    "hash": block.hash,
                    "parent_hash": block.parent_hash,
                    "proposer": block.proposer,
                    "timestamp": block.timestamp.isoformat(),
                    "tx_count": block.tx_count,
                    "state_root": block.state_root,
                }
            )

    async def _ensure_genesis_block(self) -> None:
        """Create, persist, and broadcast the genesis block if the chain is empty."""
        with self._session_factory() as session:
            head = session.exec(select(Block).where(Block.chain_id == self._config.chain_id).order_by(Block.height.desc()).limit(1)).first()
            if head is not None:
                return

            # Use a deterministic genesis timestamp so all nodes agree on the genesis block hash
            timestamp = datetime(2025, 1, 1, 0, 0, 0)
            block_hash = self._compute_block_hash(0, "0x00", timestamp)
            genesis = Block(
                chain_id=self._config.chain_id,
                height=0,
                hash=block_hash,
                parent_hash="0x00",
                proposer="genesis",
                timestamp=timestamp,
                tx_count=0,
                state_root=None,
            )
            session.add(genesis)
            session.commit()

            # Broadcast genesis block for initial sync
            await gossip_broker.publish(
                "blocks",
                {
                    "height": genesis.height,
                    "hash": genesis.hash,
                    "parent_hash": genesis.parent_hash,
                    "proposer": genesis.proposer,
                    "timestamp": genesis.timestamp.isoformat(),
                    "tx_count": genesis.tx_count,
                    "state_root": genesis.state_root,
                }
            )

    def _fetch_chain_head(self) -> Optional[Block]:
        """Return the highest block for this chain, or None when the chain is empty."""
        with self._session_factory() as session:
            # BUG FIX: filter by chain_id, consistent with every other query in
            # this class; without it a multi-chain database could return
            # another chain's head.
            return session.exec(
                select(Block)
                .where(Block.chain_id == self._config.chain_id)
                .order_by(Block.height.desc())
                .limit(1)
            ).first()

    def _compute_block_hash(self, height: int, parent_hash: str, timestamp: datetime) -> str:
        """Deterministic hash committing to chain id, height, parent, and time."""
        payload = f"{self._config.chain_id}|{height}|{parent_hash}|{timestamp.isoformat()}".encode()
        return "0x" + hashlib.sha256(payload).hexdigest()
|
|
||||||
@@ -1,11 +0,0 @@
|
|||||||
--- apps/blockchain-node/src/aitbc_chain/consensus/poa.py
|
|
||||||
+++ apps/blockchain-node/src/aitbc_chain/consensus/poa.py
|
|
||||||
@@ -101,7 +101,7 @@
|
|
||||||
# Wait for interval before proposing next block
|
|
||||||
await asyncio.sleep(self.config.interval_seconds)
|
|
||||||
|
|
||||||
- self._propose_block()
|
|
||||||
+ await self._propose_block()
|
|
||||||
|
|
||||||
except asyncio.CancelledError:
|
|
||||||
pass
|
|
||||||
@@ -1,146 +0,0 @@
|
|||||||
"""
|
|
||||||
Validator Rotation Mechanism
|
|
||||||
Handles automatic rotation of validators based on performance and stake
|
|
||||||
"""
|
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import time
|
|
||||||
from typing import List, Dict, Optional
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
from .multi_validator_poa import MultiValidatorPoA, Validator, ValidatorRole
|
|
||||||
|
|
||||||
class RotationStrategy(Enum):
    """Supported validator-rotation strategies."""

    ROUND_ROBIN = "round_robin"
    STAKE_WEIGHTED = "stake_weighted"
    REPUTATION_BASED = "reputation_based"
    HYBRID = "hybrid"
|
|
||||||
|
|
||||||
@dataclass
class RotationConfig:
    """Tuning knobs for validator rotation."""

    strategy: RotationStrategy
    rotation_interval: int  # rotation period, in blocks
    min_stake: float  # minimum stake to remain eligible
    reputation_threshold: float  # floor for reputation-based selection
    max_validators: int  # cap on concurrently active validators
|
|
||||||
|
|
||||||
class ValidatorRotation:
    """Manages validator rotation based on various strategies."""

    def __init__(self, consensus: MultiValidatorPoA, config: RotationConfig):
        self.consensus = consensus
        self.config = config
        # Height at which the last rotation took place.
        self.last_rotation_height = 0

    def should_rotate(self, current_height: int) -> bool:
        """Check if rotation should occur at current height."""
        return (current_height - self.last_rotation_height) >= self.config.rotation_interval

    def rotate_validators(self, current_height: int) -> bool:
        """Perform validator rotation based on configured strategy.

        Returns True when a rotation was performed.
        """
        if not self.should_rotate(current_height):
            return False

        if self.config.strategy == RotationStrategy.ROUND_ROBIN:
            return self._rotate_round_robin()
        elif self.config.strategy == RotationStrategy.STAKE_WEIGHTED:
            return self._rotate_stake_weighted()
        elif self.config.strategy == RotationStrategy.REPUTATION_BASED:
            return self._rotate_reputation_based()
        elif self.config.strategy == RotationStrategy.HYBRID:
            return self._rotate_hybrid()

        return False

    def _assign_roles(self, candidates, validator_cutoff: int) -> bool:
        """Assign roles by rank and advance the rotation height.

        Rank 0 becomes PROPOSER, ranks below *validator_cutoff* become
        VALIDATOR, the rest STANDBY. Shared by all rotation strategies
        (previously duplicated four times).
        """
        for rank, validator in enumerate(candidates):
            if rank == 0:
                validator.role = ValidatorRole.PROPOSER
            elif rank < validator_cutoff:
                validator.role = ValidatorRole.VALIDATOR
            else:
                validator.role = ValidatorRole.STANDBY

        # NOTE(review): advances by a fixed interval rather than snapping to
        # the triggering height — confirm intended when rotations are skipped.
        self.last_rotation_height += self.config.rotation_interval
        return True

    def _rotate_round_robin(self) -> bool:
        """Round-robin rotation of validator roles."""
        active = [v for v in self.consensus.validators.values() if v.is_active]
        # Historical behavior: round-robin keeps cutoff 3 and does not cap at
        # max_validators (the other strategies use cutoff 4 and cap).
        return self._assign_roles(active, 3)

    def _rotate_stake_weighted(self) -> bool:
        """Stake-weighted rotation: highest stake first."""
        ranked = sorted(
            [v for v in self.consensus.validators.values() if v.is_active],
            key=lambda v: v.stake,
            reverse=True,
        )
        # NOTE(review): config.min_stake is not enforced here — confirm.
        return self._assign_roles(ranked[: self.config.max_validators], 4)

    def _rotate_reputation_based(self) -> bool:
        """Reputation-based rotation: highest reputation first, above threshold."""
        ranked = sorted(
            [v for v in self.consensus.validators.values() if v.is_active],
            key=lambda v: v.reputation,
            reverse=True,
        )
        qualified = [
            v for v in ranked
            if v.reputation >= self.config.reputation_threshold
        ]
        return self._assign_roles(qualified[: self.config.max_validators], 4)

    def _rotate_hybrid(self) -> bool:
        """Hybrid rotation considering both stake and reputation."""
        candidates = [v for v in self.consensus.validators.values() if v.is_active]

        # Score is the product of stake and reputation, stored on the
        # validator object as in the original implementation.
        for validator in candidates:
            validator.hybrid_score = validator.stake * validator.reputation

        candidates.sort(key=lambda v: v.hybrid_score, reverse=True)
        return self._assign_roles(candidates[: self.config.max_validators], 4)
|
|
||||||
|
|
||||||
# Default rotation configuration: hybrid strategy, rotating every 100 blocks.
DEFAULT_ROTATION_CONFIG = RotationConfig(
    strategy=RotationStrategy.HYBRID,
    rotation_interval=100,  # Rotate every 100 blocks
    min_stake=1000.0,
    reputation_threshold=0.7,
    max_validators=10,
)
|
|
||||||
@@ -1,138 +0,0 @@
|
|||||||
"""
|
|
||||||
Slashing Conditions Implementation
|
|
||||||
Handles detection and penalties for validator misbehavior
|
|
||||||
"""
|
|
||||||
|
|
||||||
import time
|
|
||||||
from typing import Dict, List, Optional, Set
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
from .multi_validator_poa import Validator, ValidatorRole
|
|
||||||
|
|
||||||
class SlashingCondition(Enum):
    """Kinds of validator misbehavior that can trigger slashing."""

    DOUBLE_SIGN = "double_sign"
    UNAVAILABLE = "unavailable"
    INVALID_BLOCK = "invalid_block"
    SLOW_RESPONSE = "slow_response"
|
|
||||||
|
|
||||||
@dataclass
class SlashingEvent:
    """A single detected misbehavior incident for one validator."""

    validator_address: str
    condition: SlashingCondition
    evidence: str  # human-readable description of the offence
    block_height: int  # height at which the offence was observed
    timestamp: float  # unix time the event was detected
    slash_amount: float  # fraction of stake to slash (see SlashingManager.slash_rates)
|
|
||||||
|
|
||||||
class SlashingManager:
    """Manages validator slashing conditions and penalties.

    Detection methods build a SlashingEvent whose ``slash_amount`` holds the
    *fraction* of stake to slash; ``apply_slashing`` converts it into the
    absolute amount when the penalty is actually applied and recorded.
    """

    def __init__(self):
        self.slashing_events: List[SlashingEvent] = []
        # Fraction of stake removed per offence type.
        self.slash_rates = {
            SlashingCondition.DOUBLE_SIGN: 0.5,    # 50% slash
            SlashingCondition.UNAVAILABLE: 0.1,    # 10% slash
            SlashingCondition.INVALID_BLOCK: 0.3,  # 30% slash
            SlashingCondition.SLOW_RESPONSE: 0.05  # 5% slash
        }
        # Offence count at which should_slash() starts returning True.
        self.slash_thresholds = {
            SlashingCondition.DOUBLE_SIGN: 1,    # Immediate slash
            SlashingCondition.UNAVAILABLE: 3,    # After 3 offenses
            SlashingCondition.INVALID_BLOCK: 1,  # Immediate slash
            SlashingCondition.SLOW_RESPONSE: 5   # After 5 offenses
        }

    def detect_double_sign(self, validator: str, block_hash1: str, block_hash2: str, height: int) -> Optional[SlashingEvent]:
        """Detect double signing (validator signed two different blocks at same height)."""
        if block_hash1 == block_hash2:
            return None

        return SlashingEvent(
            validator_address=validator,
            condition=SlashingCondition.DOUBLE_SIGN,
            evidence=f"Double sign detected: {block_hash1} vs {block_hash2} at height {height}",
            block_height=height,
            timestamp=time.time(),
            slash_amount=self.slash_rates[SlashingCondition.DOUBLE_SIGN]
        )

    def detect_unavailability(self, validator: str, missed_blocks: int, height: int) -> Optional[SlashingEvent]:
        """Detect validator unavailability (missing consensus participation)."""
        if missed_blocks < self.slash_thresholds[SlashingCondition.UNAVAILABLE]:
            return None

        return SlashingEvent(
            validator_address=validator,
            condition=SlashingCondition.UNAVAILABLE,
            evidence=f"Missed {missed_blocks} consecutive blocks",
            block_height=height,
            timestamp=time.time(),
            slash_amount=self.slash_rates[SlashingCondition.UNAVAILABLE]
        )

    def detect_invalid_block(self, validator: str, block_hash: str, reason: str, height: int) -> Optional[SlashingEvent]:
        """Detect invalid block proposal (always produces an event)."""
        return SlashingEvent(
            validator_address=validator,
            condition=SlashingCondition.INVALID_BLOCK,
            evidence=f"Invalid block {block_hash}: {reason}",
            block_height=height,
            timestamp=time.time(),
            slash_amount=self.slash_rates[SlashingCondition.INVALID_BLOCK]
        )

    def detect_slow_response(self, validator: str, response_time: float, threshold: float, height: int) -> Optional[SlashingEvent]:
        """Detect slow consensus participation."""
        if response_time <= threshold:
            return None

        return SlashingEvent(
            validator_address=validator,
            condition=SlashingCondition.SLOW_RESPONSE,
            evidence=f"Slow response: {response_time}s (threshold: {threshold}s)",
            block_height=height,
            timestamp=time.time(),
            slash_amount=self.slash_rates[SlashingCondition.SLOW_RESPONSE]
        )

    def apply_slashing(self, validator: Validator, event: SlashingEvent) -> bool:
        """Apply slashing penalty to validator and record the event."""
        slash_amount = validator.stake * event.slash_amount
        validator.stake -= slash_amount

        # Demote validator role if remaining stake is too low.
        if validator.stake < 100:  # Minimum stake threshold
            validator.role = ValidatorRole.STANDBY

        # BUG FIX: store the absolute amount slashed on the event (the
        # detectors put the *rate* here), so calculate_total_slashed() sums
        # real amounts instead of rate fractions.
        event.slash_amount = slash_amount
        self.slashing_events.append(event)

        return True

    def get_validator_slash_count(self, validator_address: str, condition: SlashingCondition) -> int:
        """Get count of slashing events for validator and condition."""
        return len([
            event for event in self.slashing_events
            if event.validator_address == validator_address and event.condition == condition
        ])

    def should_slash(self, validator: str, condition: SlashingCondition) -> bool:
        """Check if validator should be slashed for condition."""
        current_count = self.get_validator_slash_count(validator, condition)
        threshold = self.slash_thresholds.get(condition, 1)
        return current_count >= threshold

    def get_slashing_history(self, validator_address: Optional[str] = None) -> List[SlashingEvent]:
        """Get slashing history for validator or all validators."""
        if validator_address:
            return [event for event in self.slashing_events if event.validator_address == validator_address]
        return self.slashing_events.copy()

    def calculate_total_slashed(self, validator_address: str) -> float:
        """Calculate total amount slashed for validator."""
        events = self.get_slashing_history(validator_address)
        return sum(event.slash_amount for event in events)
|
|
||||||
|
|
||||||
# Process-wide slashing manager singleton shared by the consensus layer.
slashing_manager = SlashingManager()
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
from __future__ import annotations

from .poa import PoAProposer, ProposerConfig, CircuitBreaker

# Public API of this package: re-export the PoA proposer primitives so
# callers can import them from the package root.
__all__ = ["PoAProposer", "ProposerConfig", "CircuitBreaker"]
|
|
||||||
@@ -1,210 +0,0 @@
|
|||||||
"""
|
|
||||||
Validator Key Management
|
|
||||||
Handles cryptographic key operations for validators
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
import os
import time
from dataclasses import dataclass
from typing import Dict, Optional, Tuple

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import padding, rsa
from cryptography.hazmat.primitives.serialization import Encoding, NoEncryption, PrivateFormat
|
|
||||||
|
|
||||||
@dataclass
class ValidatorKeyPair:
    """In-memory record of one validator's RSA key material and rotation timestamps."""

    # Validator address this key pair belongs to (also the lookup key in
    # KeyManager.key_pairs).
    address: str
    # PKCS#8 PEM-encoded private key. NOTE(review): stored unencrypted both in
    # memory and on disk — file permissions are the only protection; confirm
    # whether at-rest encryption is required.
    private_key_pem: str
    # SubjectPublicKeyInfo PEM-encoded public key.
    public_key_pem: str
    # Unix timestamp when the pair was first generated.
    created_at: float
    # Unix timestamp of the most recent rotation (equals created_at until rotated).
    last_rotated: float
|
|
||||||
|
|
||||||
class KeyManager:
    """Manages validator cryptographic keys.

    Keys are RSA-2048 pairs held in memory and persisted as JSON under
    ``keys_dir``. NOTE(review): private keys are written to disk unencrypted;
    the 0o700 directory / 0o600 file permissions are the only protection —
    confirm whether a KMS/HSM or at-rest encryption is required.
    """

    def __init__(self, keys_dir: str = "/opt/aitbc/keys"):
        self.keys_dir = keys_dir
        self.key_pairs: Dict[str, ValidatorKeyPair] = {}
        self._ensure_keys_directory()
        self._load_existing_keys()

    def _ensure_keys_directory(self):
        """Ensure the keys directory exists with owner-only permissions."""
        os.makedirs(self.keys_dir, mode=0o700, exist_ok=True)

    def _load_existing_keys(self):
        """Load previously persisted key pairs from disk, if any.

        Best-effort: a missing or corrupt key file leaves the manager empty
        rather than preventing startup.
        """
        keys_file = os.path.join(self.keys_dir, "validator_keys.json")
        if not os.path.exists(keys_file):
            return
        try:
            with open(keys_file, 'r') as f:
                keys_data = json.load(f)
            for address, key_data in keys_data.items():
                self.key_pairs[address] = ValidatorKeyPair(
                    address=address,
                    private_key_pem=key_data['private_key_pem'],
                    public_key_pem=key_data['public_key_pem'],
                    created_at=key_data['created_at'],
                    last_rotated=key_data['last_rotated'],
                )
        except Exception as e:
            print(f"Error loading keys: {e}")

    def generate_key_pair(self, address: str) -> ValidatorKeyPair:
        """Generate, register, and persist a new RSA-2048 key pair for *address*.

        Overwrites any existing pair for the same address.
        """
        private_key = rsa.generate_private_key(
            public_exponent=65537,
            key_size=2048,
            backend=default_backend()
        )

        # Serialize the private key as unencrypted PKCS#8 PEM (see class NOTE).
        private_key_pem = private_key.private_bytes(
            encoding=Encoding.PEM,
            format=PrivateFormat.PKCS8,
            encryption_algorithm=NoEncryption()
        ).decode('utf-8')

        public_key_pem = private_key.public_key().public_bytes(
            encoding=Encoding.PEM,
            format=serialization.PublicFormat.SubjectPublicKeyInfo
        ).decode('utf-8')

        current_time = time.time()
        key_pair = ValidatorKeyPair(
            address=address,
            private_key_pem=private_key_pem,
            public_key_pem=public_key_pem,
            created_at=current_time,
            last_rotated=current_time,
        )

        self.key_pairs[address] = key_pair
        self._save_keys()
        return key_pair

    def get_key_pair(self, address: str) -> Optional[ValidatorKeyPair]:
        """Return the key pair for *address*, or None if unknown."""
        return self.key_pairs.get(address)

    def rotate_key(self, address: str) -> Optional[ValidatorKeyPair]:
        """Replace the key pair for *address*, preserving its original created_at.

        Returns the new pair, or None when no key pair exists for the address.
        """
        if address not in self.key_pairs:
            return None

        # Capture the original creation time BEFORE generate_key_pair()
        # overwrites the entry; only the rotation time should advance.
        original_created_at = self.key_pairs[address].created_at
        new_key_pair = self.generate_key_pair(address)
        new_key_pair.created_at = original_created_at
        new_key_pair.last_rotated = time.time()

        self._save_keys()
        return new_key_pair

    def sign_message(self, address: str, message: str) -> Optional[str]:
        """Sign *message* with the validator's private key; return a hex signature.

        Returns None when no key exists for the address or signing fails.
        """
        key_pair = self.get_key_pair(address)
        if not key_pair:
            return None

        try:
            private_key = serialization.load_pem_private_key(
                key_pair.private_key_pem.encode(),
                password=None,
                backend=default_backend()
            )

            # BUG FIX: RSA sign() takes (data, padding, algorithm). The old
            # call passed hashes.SHA256() in the padding slot and the backend
            # in the algorithm slot, raising TypeError on every invocation.
            signature = private_key.sign(
                message.encode('utf-8'),
                padding.PKCS1v15(),
                hashes.SHA256()
            )
            return signature.hex()
        except Exception as e:
            print(f"Error signing message: {e}")
            return None

    def verify_signature(self, address: str, message: str, signature: str) -> bool:
        """Verify a hex *signature* over *message* with the validator's public key."""
        key_pair = self.get_key_pair(address)
        if not key_pair:
            return False

        try:
            public_key = serialization.load_pem_public_key(
                key_pair.public_key_pem.encode(),
                backend=default_backend()
            )

            # Must mirror sign_message: PKCS#1 v1.5 padding with SHA-256.
            # (Same argument-order bug fixed here as in sign_message.)
            public_key.verify(
                bytes.fromhex(signature),
                message.encode('utf-8'),
                padding.PKCS1v15(),
                hashes.SHA256()
            )
            return True
        except Exception as e:
            print(f"Error verifying signature: {e}")
            return False

    def get_public_key_pem(self, address: str) -> Optional[str]:
        """Return the public key PEM for *address*, or None if unknown."""
        key_pair = self.get_key_pair(address)
        return key_pair.public_key_pem if key_pair else None

    def _save_keys(self):
        """Persist all key pairs to validator_keys.json with 0o600 permissions."""
        keys_file = os.path.join(self.keys_dir, "validator_keys.json")

        keys_data = {
            address: {
                'private_key_pem': key_pair.private_key_pem,
                'public_key_pem': key_pair.public_key_pem,
                'created_at': key_pair.created_at,
                'last_rotated': key_pair.last_rotated,
            }
            for address, key_pair in self.key_pairs.items()
        }

        try:
            with open(keys_file, 'w') as f:
                json.dump(keys_data, f, indent=2)
            # Owner read/write only: the file holds unencrypted private keys.
            os.chmod(keys_file, 0o600)
        except Exception as e:
            print(f"Error saving keys: {e}")

    def should_rotate_key(self, address: str, rotation_interval: int = 86400) -> bool:
        """True when the key is older than *rotation_interval* seconds (default
        24 hours) or no key exists yet for the address."""
        key_pair = self.get_key_pair(address)
        if not key_pair:
            return True
        return (time.time() - key_pair.last_rotated) >= rotation_interval

    def get_key_age(self, address: str) -> Optional[float]:
        """Age in seconds since the key was first created, or None if unknown."""
        key_pair = self.get_key_pair(address)
        if not key_pair:
            return None
        return time.time() - key_pair.created_at
|
|
||||||
|
|
||||||
# Module-level singleton. NOTE(review): instantiating at import time touches
# the filesystem (creates /opt/aitbc/keys and reads validator_keys.json) —
# confirm this side effect is acceptable wherever this module is imported.
key_manager = KeyManager()
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user