From 55ced779280df07146a55cb316181562eceebd41 Mon Sep 17 00:00:00 2001 From: oib Date: Sat, 24 Jan 2026 15:46:23 +0100 Subject: [PATCH] ``` chore: remove obsolete files and add Solidity build artifacts to .gitignore - Add ignore patterns for Solidity build artifacts (typechain-types, artifacts, cache) - Remove unused exchange mock API server (api/exchange_mock_api.py) - Remove obsolete client-web README placeholder - Remove deprecated marketplace-ui HTML implementation ``` --- .gitignore | 5 + api/exchange_mock_api.py | 107 -- apps/client-web/README.md | 9 - apps/marketplace-ui/index.html | 491 ---------- apps/marketplace-ui/server.py | 53 - apps/miner-dashboard/README.md | 164 ---- .../aitbc-miner-dashboard.service | 15 - apps/miner-dashboard/aitbc-miner.service | 15 - apps/miner-dashboard/dashboard_server.py | 185 ---- apps/miner-dashboard/deploy.sh | 71 -- apps/miner-dashboard/deploy_on_host.sh | 356 ------- apps/miner-dashboard/host_deploy.sh | 313 ------ apps/miner-dashboard/host_only_setup.sh | 189 ---- apps/miner-dashboard/index.html | 449 --------- apps/miner-dashboard/miner_service.py | 181 ---- apps/miner-dashboard/quick_deploy.sh | 180 ---- apps/miner-dashboard/setup.sh | 30 - apps/miner-node/README.md | 27 - apps/miner-node/plugins/__init__.py | 15 - apps/miner-node/plugins/base.py | 111 --- apps/miner-node/plugins/blender.py | 371 ------- apps/miner-node/plugins/discovery.py | 215 ---- apps/miner-node/plugins/exceptions.py | 23 - apps/miner-node/plugins/ffmpeg.py | 318 ------ apps/miner-node/plugins/llm_inference.py | 321 ------ apps/miner-node/plugins/registry.py | 138 --- apps/miner-node/plugins/stable_diffusion.py | 281 ------ apps/miner-node/plugins/whisper.py | 215 ---- apps/miner-node/pyproject.toml | 30 - apps/miner-node/src/aitbc_miner/__init__.py | 1 - .../src/aitbc_miner/agent/__init__.py | 1 - .../src/aitbc_miner/agent/control.py | 127 --- apps/miner-node/src/aitbc_miner/config.py | 40 - .../miner-node/src/aitbc_miner/coordinator.py | 76 -- 
apps/miner-node/src/aitbc_miner/logging.py | 25 - apps/miner-node/src/aitbc_miner/main.py | 51 - .../src/aitbc_miner/runners/__init__.py | 20 - .../src/aitbc_miner/runners/base.py | 17 - .../src/aitbc_miner/runners/cli/simple.py | 62 -- .../src/aitbc_miner/runners/python/noop.py | 20 - .../src/aitbc_miner/runners/service.py | 118 --- .../src/aitbc_miner/util/backoff.py | 19 - apps/miner-node/src/aitbc_miner/util/fs.py | 15 - apps/miner-node/src/aitbc_miner/util/probe.py | 91 -- apps/miner-node/tests/test_runners.py | 37 - apps/wallet-cli/aitbc-wallet | 245 ----- apps/wallet-cli/aitbc-wallet.1 | 102 -- apps/wallet-cli/aitbc_wallet.py | 256 ----- apps/wallet-cli/wallet.py | 101 -- configs/systemd/aitbc-miner.service | 25 - docs/.github/workflows/deploy-docs.yml | 115 --- docs/.pages | 87 -- .../integration}/partner-integration.md | 0 .../integration}/skills-framework.md | 0 .../testing}/localhost-testing-scenario.md | 0 docs/done.md | 30 +- docs/files.md | 317 ++++++ docs/mkdocs.yml | 204 ---- .../README-CONTAINER-DEPLOYMENT.md | 0 .../deployment}/README-DOMAIN-DEPLOYMENT.md | 0 .../deployment}/nginx-domain-setup.md | 0 .../deployment}/simple-domain-solution.md | 0 .../deployment}/systemd_services.md | 0 .../coordinator_postgresql_migration.md | 0 .../migration}/postgresql_migration.md | 0 .../wallet_daemon_postgresql_migration.md | 0 .../components}/blockchain_node.md | 0 .../components}/coordinator_api.md | 0 .../components}/explorer_web.md | 0 .../components}/marketplace_web.md | 0 docs/{ => reference/components}/miner.md | 0 docs/{ => reference/components}/miner_node.md | 0 docs/{ => reference/components}/pool_hub.md | 0 .../components}/trade_exchange.md | 0 .../components}/wallet_daemon.md | 0 .../components}/zk-applications.md | 0 docs/reference/done.md | 205 ---- docs/{ => reference/governance}/governance.md | 0 .../roadmap-retrospective-template.md | 0 .../transparency-report-template.md | 0 docs/reference/roadmap.md | 458 --------- 
.../reference/specs/receipt-spec.md | 0 docs/requirements.txt | 27 - docs/roadmap.md | 601 +++++++++++- docs/user-guide/creating-jobs.md | 49 - docs/user-guide/explorer.md | 49 - docs/user-guide/marketplace.md | 46 - docs/user-guide/overview.md | 27 - docs/user-guide/wallet-management.md | 65 -- .../{ => user/guides}/BITCOIN-WALLET-SETUP.md | 0 .../{ => user/guides}/LOCAL_ASSETS_SUMMARY.md | 0 .../{ => user/guides}/USER-INTERFACE-GUIDE.md | 0 .../guides}/USER-MANAGEMENT-SETUP.md | 0 ecosystem-analytics/analytics_service.py | 628 ------------ ecosystem-analytics/kpi_tracker.py | 927 ------------------ .../registry/api-specification.yaml | 635 ------------ ecosystem-certification/test-suite/README.md | 55 -- .../test-suite/certify-stripe.py | 175 ---- .../fixtures/bronze/api-compliance.json | 264 ----- .../test-suite/runners/python/test_runner.py | 357 ------- .../test-suite/security/security_validator.py | 638 ------------ .../template/cookiecutter.json | 89 -- .../extension.yaml | 304 ------ .../{{cookiecutter.package_name}}/setup.py | 97 -- .../{{cookiecutter.package_name}}/__init__.py | 13 - .../{{ cookiecutter.extension_name }}.py | 369 ------- ecosystem/academic/engagement-framework.md | 335 ------- .../python-sdk/aitbc_enterprise/__init__.py | 30 - .../python-sdk/aitbc_enterprise/auth.py | 207 ---- .../python-sdk/aitbc_enterprise/base.py | 369 ------- .../python-sdk/aitbc_enterprise/core.py | 296 ------ .../aitbc_enterprise/erp/__init__.py | 18 - .../python-sdk/aitbc_enterprise/erp/base.py | 501 ---------- .../aitbc_enterprise/erp/netsuite.py | 19 - .../python-sdk/aitbc_enterprise/erp/oracle.py | 19 - .../python-sdk/aitbc_enterprise/erp/sap.py | 19 - .../python-sdk/aitbc_enterprise/exceptions.py | 68 -- .../python-sdk/aitbc_enterprise/metrics.py | 293 ------ .../aitbc_enterprise/payments/__init__.py | 19 - .../aitbc_enterprise/payments/base.py | 256 ----- .../aitbc_enterprise/payments/paypal.py | 33 - .../aitbc_enterprise/payments/square.py | 33 - 
.../aitbc_enterprise/payments/stripe.py | 489 --------- .../aitbc_enterprise/rate_limiter.py | 189 ---- .../python-sdk/aitbc_enterprise/validators.py | 318 ------ .../python-sdk/aitbc_enterprise/webhooks.py | 309 ------ .../python-sdk/docs/README.md | 270 ----- .../python-sdk/docs/api-specification.md | 598 ----------- .../python-sdk/examples/stripe_example.py | 282 ------ examples/receipts-sign-verify/README.md | 39 - .../receipts-sign-verify/fetch_and_verify.py | 78 -- .../aitbc-wallet-firefox-simple/README.md | 133 --- .../aitbc-wallet.xpi | Bin 7748 -> 0 bytes .../aitbc-wallet-firefox-simple/background.js | 160 --- .../aitbc-wallet-firefox-simple/content.js | 42 - .../icons/icon-128.png | Bin 1766 -> 0 bytes .../icons/icon-16.png | Bin 123 -> 0 bytes .../icons/icon-32.png | Bin 400 -> 0 bytes .../icons/icon-48.png | Bin 646 -> 0 bytes .../icons/icon.svg | 13 - .../aitbc-wallet-firefox-simple/injected.js | 113 --- .../aitbc-wallet-firefox-simple/install.html | 149 --- .../aitbc-wallet-firefox-simple/manifest.json | 46 - .../aitbc-wallet-firefox-simple/popup.html | 112 --- .../aitbc-wallet-firefox-simple/popup.js | 315 ------ extensions/aitbc-wallet-simple/README.md | 112 --- extensions/aitbc-wallet-simple/content.js | 28 - extensions/aitbc-wallet-simple/injected.js | 106 -- extensions/aitbc-wallet-simple/install.html | 156 --- extensions/aitbc-wallet-simple/manifest.json | 32 - extensions/aitbc-wallet-simple/popup.html | 109 -- extensions/aitbc-wallet-simple/popup.js | 162 --- extensions/aitbc-wallet/README.md | 112 --- extensions/aitbc-wallet/content.js | 28 - extensions/aitbc-wallet/injected.js | 106 -- extensions/aitbc-wallet/manifest.json | 32 - extensions/aitbc-wallet/popup.html | 109 -- extensions/aitbc-wallet/popup.js | 162 --- governance/README.md | 203 ---- governance/calls.md | 283 ------ python-sdk/aitbc/apis/__init__.py | 19 - python-sdk/aitbc/apis/jobs.py | 94 -- python-sdk/aitbc/apis/marketplace.py | 46 - python-sdk/aitbc/apis/receipts.py | 34 - 
python-sdk/aitbc/apis/settlement.py | 100 -- python-sdk/aitbc/apis/wallet.py | 50 - python-sdk/aitbc/client.py | 364 ------- python-sdk/aitbc/transport/__init__.py | 17 - python-sdk/aitbc/transport/base.py | 264 ----- python-sdk/aitbc/transport/http.py | 405 -------- python-sdk/aitbc/transport/multinetwork.py | 377 ------- python-sdk/aitbc/transport/websocket.py | 449 --------- research/autonomous-agents/agent-framework.md | 474 --------- .../economic_models_research_plan.md | 737 -------------- research/consortium/executive_summary.md | 156 --- research/consortium/framework.md | 367 ------- .../consortium/governance_research_plan.md | 666 ------------- .../consortium/hybrid_pos_research_plan.md | 432 -------- research/consortium/scaling_research_plan.md | 477 --------- .../whitepapers/hybrid_consensus_v1.md | 411 -------- .../zk_applications_research_plan.md | 654 ------------ .../prototypes/hybrid_consensus/README.md | 196 ---- .../prototypes/hybrid_consensus/consensus.py | 431 -------- research/prototypes/hybrid_consensus/demo.py | 346 ------- .../hybrid_consensus/requirements.txt | 31 - research/prototypes/rollups/zk_rollup.py | 474 --------- research/prototypes/sharding/beacon_chain.py | 356 ------- research/standards/eip-aitbc-receipts.md | 458 --------- scripts/gpu/gpu_miner_demo.py | 60 -- scripts/gpu/gpu_miner_real.py | 329 ------- scripts/gpu/gpu_miner_simple.py | 299 ------ scripts/gpu/gpu_miner_with_wait.py | 210 ---- scripts/gpu/simple_gpu_miner.py | 182 ---- windsurf/README.md | 3 - windsurf/settings.json | 5 - 195 files changed, 951 insertions(+), 30090 deletions(-) delete mode 100644 api/exchange_mock_api.py delete mode 100644 apps/client-web/README.md delete mode 100644 apps/marketplace-ui/index.html delete mode 100755 apps/marketplace-ui/server.py delete mode 100644 apps/miner-dashboard/README.md delete mode 100644 apps/miner-dashboard/aitbc-miner-dashboard.service delete mode 100644 apps/miner-dashboard/aitbc-miner.service delete mode 100644 
apps/miner-dashboard/dashboard_server.py delete mode 100644 apps/miner-dashboard/deploy.sh delete mode 100644 apps/miner-dashboard/deploy_on_host.sh delete mode 100644 apps/miner-dashboard/host_deploy.sh delete mode 100644 apps/miner-dashboard/host_only_setup.sh delete mode 100644 apps/miner-dashboard/index.html delete mode 100644 apps/miner-dashboard/miner_service.py delete mode 100644 apps/miner-dashboard/quick_deploy.sh delete mode 100644 apps/miner-dashboard/setup.sh delete mode 100644 apps/miner-node/README.md delete mode 100644 apps/miner-node/plugins/__init__.py delete mode 100644 apps/miner-node/plugins/base.py delete mode 100644 apps/miner-node/plugins/blender.py delete mode 100644 apps/miner-node/plugins/discovery.py delete mode 100644 apps/miner-node/plugins/exceptions.py delete mode 100644 apps/miner-node/plugins/ffmpeg.py delete mode 100644 apps/miner-node/plugins/llm_inference.py delete mode 100644 apps/miner-node/plugins/registry.py delete mode 100644 apps/miner-node/plugins/stable_diffusion.py delete mode 100644 apps/miner-node/plugins/whisper.py delete mode 100644 apps/miner-node/pyproject.toml delete mode 100644 apps/miner-node/src/aitbc_miner/__init__.py delete mode 100644 apps/miner-node/src/aitbc_miner/agent/__init__.py delete mode 100644 apps/miner-node/src/aitbc_miner/agent/control.py delete mode 100644 apps/miner-node/src/aitbc_miner/config.py delete mode 100644 apps/miner-node/src/aitbc_miner/coordinator.py delete mode 100644 apps/miner-node/src/aitbc_miner/logging.py delete mode 100644 apps/miner-node/src/aitbc_miner/main.py delete mode 100644 apps/miner-node/src/aitbc_miner/runners/__init__.py delete mode 100644 apps/miner-node/src/aitbc_miner/runners/base.py delete mode 100644 apps/miner-node/src/aitbc_miner/runners/cli/simple.py delete mode 100644 apps/miner-node/src/aitbc_miner/runners/python/noop.py delete mode 100644 apps/miner-node/src/aitbc_miner/runners/service.py delete mode 100644 apps/miner-node/src/aitbc_miner/util/backoff.py 
delete mode 100644 apps/miner-node/src/aitbc_miner/util/fs.py delete mode 100644 apps/miner-node/src/aitbc_miner/util/probe.py delete mode 100644 apps/miner-node/tests/test_runners.py delete mode 100755 apps/wallet-cli/aitbc-wallet delete mode 100644 apps/wallet-cli/aitbc-wallet.1 delete mode 100755 apps/wallet-cli/aitbc_wallet.py delete mode 100755 apps/wallet-cli/wallet.py delete mode 100644 configs/systemd/aitbc-miner.service delete mode 100644 docs/.github/workflows/deploy-docs.yml delete mode 100644 docs/.pages rename docs/{ => developer/integration}/partner-integration.md (100%) rename docs/{ => developer/integration}/skills-framework.md (100%) rename docs/{ => developer/testing}/localhost-testing-scenario.md (100%) create mode 100644 docs/files.md delete mode 100644 docs/mkdocs.yml rename docs/{ => operator/deployment}/README-CONTAINER-DEPLOYMENT.md (100%) rename docs/{ => operator/deployment}/README-DOMAIN-DEPLOYMENT.md (100%) rename docs/{ => operator/deployment}/nginx-domain-setup.md (100%) rename docs/{ => operator/deployment}/simple-domain-solution.md (100%) rename docs/{ => operator/deployment}/systemd_services.md (100%) rename docs/{ => operator/migration}/coordinator_postgresql_migration.md (100%) rename docs/{ => operator/migration}/postgresql_migration.md (100%) rename docs/{ => operator/migration}/wallet_daemon_postgresql_migration.md (100%) rename docs/{ => reference/components}/blockchain_node.md (100%) rename docs/{ => reference/components}/coordinator_api.md (100%) rename docs/{ => reference/components}/explorer_web.md (100%) rename docs/{ => reference/components}/marketplace_web.md (100%) rename docs/{ => reference/components}/miner.md (100%) rename docs/{ => reference/components}/miner_node.md (100%) rename docs/{ => reference/components}/pool_hub.md (100%) rename docs/{ => reference/components}/trade_exchange.md (100%) rename docs/{ => reference/components}/wallet_daemon.md (100%) rename docs/{ => reference/components}/zk-applications.md 
(100%) delete mode 100644 docs/reference/done.md rename docs/{ => reference/governance}/governance.md (100%) rename docs/{ => reference/governance}/roadmap-retrospective-template.md (100%) rename docs/{ => reference/governance}/transparency-report-template.md (100%) delete mode 100644 docs/reference/roadmap.md rename protocols/receipts/spec.md => docs/reference/specs/receipt-spec.md (100%) delete mode 100644 docs/requirements.txt mode change 120000 => 100644 docs/roadmap.md delete mode 100644 docs/user-guide/creating-jobs.md delete mode 100644 docs/user-guide/explorer.md delete mode 100644 docs/user-guide/marketplace.md delete mode 100644 docs/user-guide/overview.md delete mode 100644 docs/user-guide/wallet-management.md rename docs/{ => user/guides}/BITCOIN-WALLET-SETUP.md (100%) rename docs/{ => user/guides}/LOCAL_ASSETS_SUMMARY.md (100%) rename docs/{ => user/guides}/USER-INTERFACE-GUIDE.md (100%) rename docs/{ => user/guides}/USER-MANAGEMENT-SETUP.md (100%) delete mode 100644 ecosystem-analytics/analytics_service.py delete mode 100644 ecosystem-analytics/kpi_tracker.py delete mode 100644 ecosystem-certification/registry/api-specification.yaml delete mode 100644 ecosystem-certification/test-suite/README.md delete mode 100644 ecosystem-certification/test-suite/certify-stripe.py delete mode 100644 ecosystem-certification/test-suite/fixtures/bronze/api-compliance.json delete mode 100644 ecosystem-certification/test-suite/runners/python/test_runner.py delete mode 100644 ecosystem-certification/test-suite/security/security_validator.py delete mode 100644 ecosystem-extensions/template/cookiecutter.json delete mode 100644 ecosystem-extensions/template/{{cookiecutter.package_name}}/extension.yaml delete mode 100644 ecosystem-extensions/template/{{cookiecutter.package_name}}/setup.py delete mode 100644 ecosystem-extensions/template/{{cookiecutter.package_name}}/{{cookiecutter.package_name}}/__init__.py delete mode 100644 
ecosystem-extensions/template/{{cookiecutter.package_name}}/{{cookiecutter.package_name}}/{{ cookiecutter.extension_name }}.py delete mode 100644 ecosystem/academic/engagement-framework.md delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/__init__.py delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/auth.py delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/base.py delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/core.py delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/erp/__init__.py delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/erp/base.py delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/erp/netsuite.py delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/erp/oracle.py delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/erp/sap.py delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/exceptions.py delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/metrics.py delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/payments/__init__.py delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/payments/base.py delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/payments/paypal.py delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/payments/square.py delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/payments/stripe.py delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/rate_limiter.py delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/validators.py delete mode 100644 enterprise-connectors/python-sdk/aitbc_enterprise/webhooks.py delete mode 100644 enterprise-connectors/python-sdk/docs/README.md delete mode 100644 enterprise-connectors/python-sdk/docs/api-specification.md delete mode 100644 enterprise-connectors/python-sdk/examples/stripe_example.py delete 
mode 100644 examples/receipts-sign-verify/README.md delete mode 100644 examples/receipts-sign-verify/fetch_and_verify.py delete mode 100644 extensions/aitbc-wallet-firefox-simple/README.md delete mode 100644 extensions/aitbc-wallet-firefox-simple/aitbc-wallet.xpi delete mode 100644 extensions/aitbc-wallet-firefox-simple/background.js delete mode 100644 extensions/aitbc-wallet-firefox-simple/content.js delete mode 100644 extensions/aitbc-wallet-firefox-simple/icons/icon-128.png delete mode 100644 extensions/aitbc-wallet-firefox-simple/icons/icon-16.png delete mode 100644 extensions/aitbc-wallet-firefox-simple/icons/icon-32.png delete mode 100644 extensions/aitbc-wallet-firefox-simple/icons/icon-48.png delete mode 100644 extensions/aitbc-wallet-firefox-simple/icons/icon.svg delete mode 100644 extensions/aitbc-wallet-firefox-simple/injected.js delete mode 100644 extensions/aitbc-wallet-firefox-simple/install.html delete mode 100644 extensions/aitbc-wallet-firefox-simple/manifest.json delete mode 100644 extensions/aitbc-wallet-firefox-simple/popup.html delete mode 100644 extensions/aitbc-wallet-firefox-simple/popup.js delete mode 100644 extensions/aitbc-wallet-simple/README.md delete mode 100644 extensions/aitbc-wallet-simple/content.js delete mode 100644 extensions/aitbc-wallet-simple/injected.js delete mode 100644 extensions/aitbc-wallet-simple/install.html delete mode 100644 extensions/aitbc-wallet-simple/manifest.json delete mode 100644 extensions/aitbc-wallet-simple/popup.html delete mode 100644 extensions/aitbc-wallet-simple/popup.js delete mode 100644 extensions/aitbc-wallet/README.md delete mode 100644 extensions/aitbc-wallet/content.js delete mode 100644 extensions/aitbc-wallet/injected.js delete mode 100644 extensions/aitbc-wallet/manifest.json delete mode 100644 extensions/aitbc-wallet/popup.html delete mode 100644 extensions/aitbc-wallet/popup.js delete mode 100644 governance/README.md delete mode 100644 governance/calls.md delete mode 100644 
python-sdk/aitbc/apis/__init__.py delete mode 100644 python-sdk/aitbc/apis/jobs.py delete mode 100644 python-sdk/aitbc/apis/marketplace.py delete mode 100644 python-sdk/aitbc/apis/receipts.py delete mode 100644 python-sdk/aitbc/apis/settlement.py delete mode 100644 python-sdk/aitbc/apis/wallet.py delete mode 100644 python-sdk/aitbc/client.py delete mode 100644 python-sdk/aitbc/transport/__init__.py delete mode 100644 python-sdk/aitbc/transport/base.py delete mode 100644 python-sdk/aitbc/transport/http.py delete mode 100644 python-sdk/aitbc/transport/multinetwork.py delete mode 100644 python-sdk/aitbc/transport/websocket.py delete mode 100644 research/autonomous-agents/agent-framework.md delete mode 100644 research/consortium/economic_models_research_plan.md delete mode 100644 research/consortium/executive_summary.md delete mode 100644 research/consortium/framework.md delete mode 100644 research/consortium/governance_research_plan.md delete mode 100644 research/consortium/hybrid_pos_research_plan.md delete mode 100644 research/consortium/scaling_research_plan.md delete mode 100644 research/consortium/whitepapers/hybrid_consensus_v1.md delete mode 100644 research/consortium/zk_applications_research_plan.md delete mode 100644 research/prototypes/hybrid_consensus/README.md delete mode 100644 research/prototypes/hybrid_consensus/consensus.py delete mode 100644 research/prototypes/hybrid_consensus/demo.py delete mode 100644 research/prototypes/hybrid_consensus/requirements.txt delete mode 100644 research/prototypes/rollups/zk_rollup.py delete mode 100644 research/prototypes/sharding/beacon_chain.py delete mode 100644 research/standards/eip-aitbc-receipts.md delete mode 100644 scripts/gpu/gpu_miner_demo.py delete mode 100644 scripts/gpu/gpu_miner_real.py delete mode 100644 scripts/gpu/gpu_miner_simple.py delete mode 100644 scripts/gpu/gpu_miner_with_wait.py delete mode 100644 scripts/gpu/simple_gpu_miner.py delete mode 100644 windsurf/README.md delete mode 100644 
windsurf/settings.json diff --git a/.gitignore b/.gitignore index 49d1dfc1..9f33745d 100644 --- a/.gitignore +++ b/.gitignore @@ -133,6 +133,11 @@ apps/coordinator-api/src/*.db # Explorer build artifacts apps/explorer-web/dist/ +# Solidity build artifacts +packages/solidity/aitbc-token/typechain-types/ +packages/solidity/aitbc-token/artifacts/ +packages/solidity/aitbc-token/cache/ + # Local test data tests/fixtures/generated/ diff --git a/api/exchange_mock_api.py b/api/exchange_mock_api.py deleted file mode 100644 index 449fdecb..00000000 --- a/api/exchange_mock_api.py +++ /dev/null @@ -1,107 +0,0 @@ -#!/usr/bin/env python3 -import json -from http.server import BaseHTTPRequestHandler, HTTPServer -from urllib.parse import urlparse - - -class Handler(BaseHTTPRequestHandler): - def _json(self, payload, status=200): - body = json.dumps(payload).encode("utf-8") - self.send_response(status) - self.send_header("Content-Type", "application/json") - self.send_header("Access-Control-Allow-Origin", "*") - self.send_header("Access-Control-Allow-Methods", "GET, POST, OPTIONS") - self.send_header("Access-Control-Allow-Headers", "Content-Type, X-Api-Key") - self.send_header("Content-Length", str(len(body))) - self.end_headers() - self.wfile.write(body) - - def do_OPTIONS(self): - self.send_response(204) - self.send_header("Access-Control-Allow-Origin", "*") - self.send_header("Access-Control-Allow-Methods", "GET, POST, OPTIONS") - self.send_header("Access-Control-Allow-Headers", "Content-Type, X-Api-Key") - self.end_headers() - - def do_GET(self): - path = urlparse(self.path).path - - if path == "/api/trades/recent": - trades = [ - {"id": 1, "price": 0.00001, "amount": 1500, "created_at": "2026-01-21T17:00:00Z"}, - {"id": 2, "price": 0.0000095, "amount": 500, "created_at": "2026-01-21T16:55:00Z"}, - ] - return self._json(trades) - - if path == "/api/orders/orderbook": - orderbook = { - "sells": [{"price": 0.00001, "remaining": 1500, "amount": 1500}], - "buys": [{"price": 
0.000009, "remaining": 1000, "amount": 1000}], - } - return self._json(orderbook) - - if path == "/api/wallet/balance": - return self._json({"balance": 1000, "currency": "AITBC"}) - - if path == "/api/treasury-balance": - return self._json({ - "balance": 50000, - "currency": "AITBC", - "usd_value": 5000.00, - "last_updated": "2026-01-21T18:00:00Z" - }) - - if path == "/api/exchange/wallet/info": - return self._json({ - "address": "aitbc1exchange123456789", - "balance": 1000, - "currency": "AITBC", - "total_transactions": 150, - "status": "active", - "transactions": [ - { - "id": "txn_001", - "type": "deposit", - "amount": 500, - "timestamp": "2026-01-21T17:00:00Z", - "status": "completed" - }, - { - "id": "txn_002", - "type": "withdrawal", - "amount": 200, - "timestamp": "2026-01-21T16:30:00Z", - "status": "completed" - }, - { - "id": "txn_003", - "type": "trade", - "amount": 100, - "timestamp": "2026-01-21T16:00:00Z", - "status": "completed" - } - ] - }) - - return self._json({"detail": "Not Found"}, status=404) - - def do_POST(self): - path = urlparse(self.path).path - - if path == "/api/wallet/connect": - resp = { - "success": True, - "address": "aitbc1wallet123456789", - "message": "Wallet connected successfully", - } - return self._json(resp) - - return self._json({"detail": "Not Found"}, status=404) - - -def main(): - HTTPServer(("127.0.0.1", 8085), Handler).serve_forever() - - -if __name__ == "__main__": - main() diff --git a/apps/client-web/README.md b/apps/client-web/README.md deleted file mode 100644 index 1013233a..00000000 --- a/apps/client-web/README.md +++ /dev/null @@ -1,9 +0,0 @@ -# Client Web - -## Purpose & Scope - -Front-end application that allows users to submit compute jobs, monitor status, and interact with AITBC services. See `docs/bootstrap/dirs.md` and `docs/bootstrap/examples.md` for guidance. - -## Development Setup - -Implementation pending. 
Recommended stack: lightweight web framework (per bootstrap doc) without heavy front-end frameworks. diff --git a/apps/marketplace-ui/index.html b/apps/marketplace-ui/index.html deleted file mode 100644 index 89c28547..00000000 --- a/apps/marketplace-ui/index.html +++ /dev/null @@ -1,491 +0,0 @@ - - - - - - AITBC Marketplace - GPU Compute Trading - - - - - - - - -
-
-
-
- -

AITBC Marketplace

-
- -
-
-
- - -
- -
-
-
-
-

Active Bids

-

0

-
- -
-
-
-
-
-

Total Capacity

-

0 GPUs

-
- -
-
-
-
-
-

Avg Price

-

$0.00

-
- -
-
-
-
-
-

Your Balance

-

0 AITBC

-
- -
-
-
- - -
-
-

Available GPU Compute

-
- - -
-
- -
- -
-
- - - - - - -
- - -
- -
- - - - diff --git a/apps/marketplace-ui/server.py b/apps/marketplace-ui/server.py deleted file mode 100755 index 4887d927..00000000 --- a/apps/marketplace-ui/server.py +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env python3 -""" -Simple HTTP server for the AITBC Marketplace UI -""" - -import os -import sys -from http.server import HTTPServer, SimpleHTTPRequestHandler -import argparse - -class CORSHTTPRequestHandler(SimpleHTTPRequestHandler): - def end_headers(self): - self.send_header('Access-Control-Allow-Origin', '*') - self.send_header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS') - self.send_header('Access-Control-Allow-Headers', 'Content-Type, X-Api-Key') - super().end_headers() - - def do_OPTIONS(self): - self.send_response(200) - self.end_headers() - -def run_server(port=3000, directory=None): - """Run the HTTP server""" - if directory: - os.chdir(directory) - - server_address = ('', port) - httpd = HTTPServer(server_address, CORSHTTPRequestHandler) - - print(f""" -╔═══════════════════════════════════════╗ -║ AITBC Marketplace UI Server ║ -╠═══════════════════════════════════════╣ -║ Server running at: ║ -║ http://localhost:{port} ║ -║ ║ -║ Press Ctrl+C to stop ║ -╚═══════════════════════════════════════╝ - """) - - try: - httpd.serve_forever() - except KeyboardInterrupt: - print("\nShutting down server...") - httpd.server_close() - -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='Run the AITBC Marketplace UI server') - parser.add_argument('--port', type=int, default=3000, help='Port to run the server on') - parser.add_argument('--dir', type=str, default='.', help='Directory to serve from') - - args = parser.parse_args() - run_server(port=args.port, directory=args.dir) diff --git a/apps/miner-dashboard/README.md b/apps/miner-dashboard/README.md deleted file mode 100644 index ba304165..00000000 --- a/apps/miner-dashboard/README.md +++ /dev/null @@ -1,164 +0,0 @@ -# AITBC Miner Dashboard - -A real-time monitoring dashboard 
for GPU mining operations in the AITBC network. - -## Features - -### 🎯 GPU Monitoring -- Real-time GPU utilization -- Temperature monitoring -- Power consumption tracking -- Memory usage display -- Performance state indicators - -### ⛏️ Mining Operations -- Active job tracking -- Job progress visualization -- Success/failure statistics -- Average job time metrics - -### 📊 Performance Analytics -- GPU utilization charts (last hour) -- Hash rate performance tracking -- Mining statistics dashboard -- Service capability overview - -### 🔧 Available Services -- GPU Computing (CUDA cores) -- Parallel Processing (multi-threaded) -- Hash Generation (proof-of-work) -- AI Model Training (ML operations) -- Blockchain Validation -- Data Processing - -## Quick Start - -### 1. Deploy the Dashboard -```bash -cd /home/oib/windsurf/aitbc/apps/miner-dashboard -sudo ./deploy.sh -``` - -### 2. Access the Dashboard -- Local: http://localhost:8080 -- Remote: http://[SERVER_IP]:8080 - -### 3. Monitor Mining -- View real-time GPU status -- Track active mining jobs -- Monitor hash rates -- Check service availability - -## Architecture - -``` -┌─────────────────┐ ┌──────────────────┐ ┌─────────────────┐ -│ Web Browser │◄──►│ Dashboard Server │◄──►│ GPU Miner │ -│ (Dashboard UI) │ │ (Port 8080) │ │ (Background) │ -└─────────────────┘ └──────────────────┘ └─────────────────┘ - │ - ▼ - ┌─────────────────┐ - │ nvidia-smi │ - │ (GPU Metrics) │ - └─────────────────┘ -``` - -## API Endpoints - -- `GET /api/gpu-status` - Real-time GPU metrics -- `GET /api/mining-jobs` - Active mining jobs -- `GET /api/statistics` - Mining statistics -- `GET /api/services` - Available services - -## Service Management - -### Start Services -```bash -sudo systemctl start aitbc-miner -sudo systemctl start aitbc-miner-dashboard -``` - -### Stop Services -```bash -sudo systemctl stop aitbc-miner -sudo systemctl stop aitbc-miner-dashboard -``` - -### View Logs -```bash -sudo journalctl -u aitbc-miner -f -sudo journalctl 
-u aitbc-miner-dashboard -f -``` - -## GPU Requirements - -- NVIDIA GPU with CUDA support -- nvidia-smi utility installed -- GPU memory: 4GB+ recommended -- CUDA drivers up to date - -## Troubleshooting - -### Dashboard Not Loading -```bash -# Check service status -sudo systemctl status aitbc-miner-dashboard - -# Check logs -sudo journalctl -u aitbc-miner-dashboard -n 50 -``` - -### GPU Not Detected -```bash -# Verify nvidia-smi -nvidia-smi - -# Check GPU permissions -ls -l /dev/nvidia* -``` - -### No Mining Jobs -```bash -# Check miner service -sudo systemctl status aitbc-miner - -# Restart if needed -sudo systemctl restart aitbc-miner -``` - -## Configuration - -### GPU Monitoring -The dashboard automatically detects NVIDIA GPUs using nvidia-smi. - -### Mining Performance -Adjust mining parameters in `miner_service.py`: -- Job frequency -- Processing duration -- Success rates - -### Dashboard Port -Change port in `dashboard_server.py` (default: 8080). - -## Security - -- Dashboard runs on localhost by default -- No external database required -- Minimal dependencies -- Read-only GPU monitoring - -## Development - -### Extend Services -Add new mining services in the `get_services()` method. - -### Customize UI -Modify `index.html` to change the dashboard appearance. - -### Add Metrics -Extend the API with new endpoints for additional metrics. 
- -## License - -AITBC Project - Internal Use Only diff --git a/apps/miner-dashboard/aitbc-miner-dashboard.service b/apps/miner-dashboard/aitbc-miner-dashboard.service deleted file mode 100644 index 46922fd6..00000000 --- a/apps/miner-dashboard/aitbc-miner-dashboard.service +++ /dev/null @@ -1,15 +0,0 @@ -[Unit] -Description=AITBC Miner Dashboard -After=network.target - -[Service] -Type=simple -User=root -WorkingDirectory=/opt/aitbc-miner-dashboard -Environment=PYTHONPATH=/opt/aitbc-miner-dashboard -ExecStart=/opt/aitbc-miner-dashboard/.venv/bin/python dashboard_server.py -Restart=always -RestartSec=3 - -[Install] -WantedBy=multi-user.target diff --git a/apps/miner-dashboard/aitbc-miner.service b/apps/miner-dashboard/aitbc-miner.service deleted file mode 100644 index 43cbfa3f..00000000 --- a/apps/miner-dashboard/aitbc-miner.service +++ /dev/null @@ -1,15 +0,0 @@ -[Unit] -Description=AITBC GPU Mining Service -After=network.target - -[Service] -Type=simple -User=root -WorkingDirectory=/opt/aitbc-miner-dashboard -Environment=PYTHONPATH=/opt/aitbc-miner-dashboard -ExecStart=/opt/aitbc-miner-dashboard/.venv/bin/python miner_service.py -Restart=always -RestartSec=3 - -[Install] -WantedBy=multi-user.target diff --git a/apps/miner-dashboard/dashboard_server.py b/apps/miner-dashboard/dashboard_server.py deleted file mode 100644 index 2256cd50..00000000 --- a/apps/miner-dashboard/dashboard_server.py +++ /dev/null @@ -1,185 +0,0 @@ -#!/usr/bin/env python3 -"""AITBC Miner Dashboard API - Real-time GPU and mining status""" - -from http.server import HTTPServer, BaseHTTPRequestHandler -import json -import subprocess -import psutil -from datetime import datetime, timedelta -import random - -class MinerDashboardHandler(BaseHTTPRequestHandler): - def send_json_response(self, data, status=200): - """Send JSON response""" - self.send_response(status) - self.send_header('Content-Type', 'application/json') - self.send_header('Access-Control-Allow-Origin', '*') - self.end_headers() - 
self.wfile.write(json.dumps(data, default=str).encode()) - - def do_GET(self): - """Handle GET requests""" - if self.path == '/api/gpu-status': - self.get_gpu_status() - elif self.path == '/api/mining-jobs': - self.get_mining_jobs() - elif self.path == '/api/statistics': - self.get_statistics() - elif self.path == '/api/services': - self.get_services() - elif self.path == '/' or self.path == '/index.html': - self.serve_dashboard() - else: - self.send_error(404) - - def get_gpu_status(self): - """Get real GPU status from nvidia-smi""" - try: - # Parse nvidia-smi output - result = subprocess.run(['nvidia-smi', '--query-gpu=utilization.gpu,temperature.gpu,power.draw,memory.used,memory.total,performance_state', '--format=csv,noheader,nounits'], - capture_output=True, text=True) - - if result.returncode == 0: - values = result.stdout.strip().split(', ') - gpu_data = { - 'utilization': int(values[0]), - 'temperature': int(values[1]), - 'power_usage': float(values[2]), - 'memory_used': float(values[3]) / 1024, # Convert MB to GB - 'memory_total': float(values[4]) / 1024, - 'performance_state': values[5], - 'timestamp': datetime.now().isoformat() - } - self.send_json_response(gpu_data) - else: - # Fallback to mock data - self.send_json_response({ - 'utilization': 0, - 'temperature': 43, - 'power_usage': 18, - 'memory_used': 2.9, - 'memory_total': 16, - 'performance_state': 'P8', - 'timestamp': datetime.now().isoformat() - }) - except Exception as e: - self.send_json_response({'error': str(e)}, 500) - - def get_mining_jobs(self): - """Get active mining jobs from the miner service""" - try: - # Connect to miner service via socket or API - # For now, simulate with mock data - jobs = [ - { - 'id': 'job_12345', - 'name': 'Matrix Computation', - 'progress': 85, - 'status': 'running', - 'started_at': (datetime.now() - timedelta(minutes=10)).isoformat(), - 'estimated_completion': (datetime.now() + timedelta(minutes=2)).isoformat() - }, - { - 'id': 'job_12346', - 'name': 'Hash 
Validation', - 'progress': 42, - 'status': 'running', - 'started_at': (datetime.now() - timedelta(minutes=5)).isoformat(), - 'estimated_completion': (datetime.now() + timedelta(minutes=7)).isoformat() - } - ] - self.send_json_response(jobs) - except Exception as e: - self.send_json_response({'error': str(e)}, 500) - - def get_statistics(self): - """Get mining statistics""" - stats = { - 'total_jobs_completed': random.randint(1200, 1300), - 'average_job_time': round(random.uniform(10, 15), 1), - 'success_rate': round(random.uniform(95, 99), 1), - 'total_earned_btc': round(random.uniform(0.004, 0.005), 4), - 'total_earned_aitbc': random.randint(100, 200), - 'uptime_hours': 24, - 'hash_rate': round(random.uniform(45, 55), 1), # MH/s - 'efficiency': round(random.uniform(0.8, 1.2), 2) # W/MH - } - self.send_json_response(stats) - - def get_services(self): - """Get available mining services""" - services = [ - { - 'name': 'GPU Computing', - 'description': 'CUDA cores available for computation', - 'status': 'active', - 'capacity': '100%', - 'utilization': 65 - }, - { - 'name': 'Parallel Processing', - 'description': 'Multi-threaded job execution', - 'status': 'active', - 'capacity': '8 threads', - 'utilization': 45 - }, - { - 'name': 'Hash Generation', - 'description': 'Proof-of-work computation', - 'status': 'standby', - 'capacity': '50 MH/s', - 'utilization': 0 - }, - { - 'name': 'AI Model Training', - 'description': 'Machine learning operations', - 'status': 'available', - 'capacity': '16GB VRAM', - 'utilization': 0 - }, - { - 'name': 'Blockchain Validation', - 'description': 'AITBC block validation', - 'status': 'active', - 'capacity': '1000 tx/s', - 'utilization': 23 - }, - { - 'name': 'Data Processing', - 'description': 'Large dataset processing', - 'status': 'available', - 'capacity': '500GB/hour', - 'utilization': 0 - } - ] - self.send_json_response(services) - - def serve_dashboard(self): - """Serve the dashboard HTML""" - try: - with open('index.html', 'r') as 
f: - self.send_response(200) - self.send_header('Content-Type', 'text/html') - self.end_headers() - self.wfile.write(f.read().encode()) - except FileNotFoundError: - self.send_error(404, 'Dashboard not found') - -def run_server(port=8080): - """Run the miner dashboard server""" - server = HTTPServer(('localhost', port), MinerDashboardHandler) - print(f""" -╔═══════════════════════════════════════╗ -║ AITBC Miner Dashboard Server ║ -╠═══════════════════════════════════════╣ -║ Dashboard running at: ║ -║ http://localhost:{port} ║ -║ ║ -║ GPU Monitoring Active! ║ -║ Real-time Mining Status ║ -╚═══════════════════════════════════════╝ -""") - server.serve_forever() - -if __name__ == "__main__": - run_server() diff --git a/apps/miner-dashboard/deploy.sh b/apps/miner-dashboard/deploy.sh deleted file mode 100644 index 6044a393..00000000 --- a/apps/miner-dashboard/deploy.sh +++ /dev/null @@ -1,71 +0,0 @@ -#!/bin/bash - -echo "=== AITBC Miner Dashboard & Service Deployment ===" -echo "" - -# Check if running as root -if [ "$EUID" -ne 0 ]; then - echo "Please run as root (use sudo)" - exit 1 -fi - -# Create directories -echo "Creating directories..." -mkdir -p /opt/aitbc-miner-dashboard -mkdir -p /var/log/aitbc-miner - -# Copy files -echo "Copying files..." -cp -r /home/oib/windsurf/aitbc/apps/miner-dashboard/* /opt/aitbc-miner-dashboard/ - -# Set permissions -chown -R root:root /opt/aitbc-miner-dashboard -chmod +x /opt/aitbc-miner-dashboard/*.py -chmod +x /opt/aitbc-miner-dashboard/*.sh - -# Create virtual environment -echo "Setting up Python environment..." -cd /opt/aitbc-miner-dashboard -python3 -m venv .venv -.venv/bin/pip install psutil - -# Install systemd services -echo "Installing systemd services..." -cp aitbc-miner-dashboard.service /etc/systemd/system/ -cp aitbc-miner.service /etc/systemd/system/ - -# Reload systemd -systemctl daemon-reload - -# Enable and start services -echo "Starting services..." 
-systemctl enable aitbc-miner -systemctl enable aitbc-miner-dashboard -systemctl start aitbc-miner -systemctl start aitbc-miner-dashboard - -# Wait for services to start -sleep 5 - -# Check status -echo "" -echo "=== Service Status ===" -systemctl status aitbc-miner --no-pager -l | head -5 -systemctl status aitbc-miner-dashboard --no-pager -l | head -5 - -# Get IP address -IP=$(hostname -I | awk '{print $1}') - -echo "" -echo "✅ Deployment complete!" -echo "" -echo "Services:" -echo " - Miner Service: Running (background)" -echo " - Dashboard: http://localhost:8080" -echo "" -echo "Access from other machines:" -echo " http://$IP:8080" -echo "" -echo "To view logs:" -echo " sudo journalctl -u aitbc-miner -f" -echo " sudo journalctl -u aitbc-miner-dashboard -f" diff --git a/apps/miner-dashboard/deploy_on_host.sh b/apps/miner-dashboard/deploy_on_host.sh deleted file mode 100644 index df0fbdfa..00000000 --- a/apps/miner-dashboard/deploy_on_host.sh +++ /dev/null @@ -1,356 +0,0 @@ -#!/bin/bash - -echo "========================================" -echo " AITBC GPU Miner Dashboard Setup" -echo " Running on HOST (at1/localhost)" -echo "========================================" -echo "" - -# Check if we have GPU access -if ! command -v nvidia-smi &> /dev/null; then - echo "❌ ERROR: nvidia-smi not found!" - echo "Please ensure NVIDIA drivers are installed on the host." - exit 1 -fi - -echo "✅ GPU detected: $(nvidia-smi --query-gpu=name --format=csv,noheader)" -echo "" - -# Create dashboard directory -mkdir -p ~/miner-dashboard -cd ~/miner-dashboard - -echo "Creating dashboard files..." - -# Create the main dashboard HTML -cat > index.html << 'HTML' - - - - - - AITBC GPU Miner Dashboard - Host - - - - - - -
-
-
-
- -
-

AITBC GPU Miner Dashboard

-

✓ Running on HOST with direct GPU access

-
-
-
- - - GPU Online - - -
-
-
-
- - -
- -
-
-
-

NVIDIA GeForce RTX 4060 Ti

-

Real-time GPU Performance Monitor

-
-
-
0%
-
GPU Utilization
-
-
- -
-
-
-
-

Temperature

-

--°C

-
- -
-
-
-
-
-

Power Usage

-

--W

-
- -
-
-
-
-
-

Memory Used

-

--GB

-
- -
-
-
-
-
-

Performance

-

P8

-
- -
-
-
-
- - -
- -
-

- - Mining Operations - 0 active jobs -

-
-
- -

Miner Idle

-

Ready to accept mining jobs

-
-
-
- - -
-

- - GPU Services Status -

-
-
-
- -
-

CUDA Computing

-

4352 CUDA cores available

-
-
- Active -
-
-
- -
-

Parallel Processing

-

Multi-threaded operations

-
-
- Active -
-
-
- -
-

Hash Generation

-

Proof-of-work computation

-
-
- Standby -
-
-
- -
-

AI Model Training

-

Machine learning operations

-
-
- Available -
-
-
-
- - -
-
-

GPU Utilization (Last Hour)

- -
-
-

Hash Rate Performance

- -
-
- - -
-

System Information

-
-
- -

Host System

-

Loading...

-
-
- -

GPU Access

-

Direct

-
-
- -

Container

-

Not Used

-
-
-
-
- - - - - -HTML - -# Create startup script -cat > start-dashboard.sh << 'EOF' -#!/bin/bash -cd ~/miner-dashboard -echo "" -echo "========================================" -echo " Starting AITBC GPU Miner Dashboard" -echo "========================================" -echo "" -echo "Dashboard will be available at:" -echo " Local: http://localhost:8080" -echo " Network: http://$(hostname -I | awk '{print $1}'):8080" -echo "" -echo "Press Ctrl+C to stop the dashboard" -echo "" -python3 -m http.server 8080 --bind 0.0.0.0 -EOF - -chmod +x start-dashboard.sh - -echo "" -echo "✅ Dashboard setup complete!" -echo "" -echo "To start the dashboard, run:" -echo " ~/miner-dashboard/start-dashboard.sh" -echo "" -echo "Dashboard location: ~/miner-dashboard/" -echo "" -echo "========================================" diff --git a/apps/miner-dashboard/host_deploy.sh b/apps/miner-dashboard/host_deploy.sh deleted file mode 100644 index 82c643ae..00000000 --- a/apps/miner-dashboard/host_deploy.sh +++ /dev/null @@ -1,313 +0,0 @@ -#!/bin/bash - -echo "=== AITBC Miner Dashboard - Host Deployment ===" -echo "" - -# Check if running on host with GPU -if ! command -v nvidia-smi &> /dev/null; then - echo "❌ nvidia-smi not found. Please install NVIDIA drivers." - exit 1 -fi - -# Create directory -mkdir -p ~/miner-dashboard -cd ~/miner-dashboard - -echo "✅ GPU detected: $(nvidia-smi --query-gpu=name --format=csv,noheader)" - -# Create dashboard HTML -cat > index.html << 'EOF' - - - - - - AITBC GPU Miner Dashboard - - - - - -
-
-
-
- -
-

AITBC Miner Dashboard

-

Host GPU Mining Operations

-
-
-
- - - GPU Connected - - -
-
-
-
- -
- -
-
-
-

Loading...

-

Real-time GPU Status

-
-
-
0%
-
GPU Utilization
-
-
- -
-
-
-
-

Temperature

-

--°C

-
- -
-
-
-
-
-

Power Usage

-

--W

-
- -
-
-
-
-
-

Memory Used

-

--GB

-
- -
-
-
-
-
-

Performance

-

--

-
- -
-
-
-
- - -
- -
-

- - Mining Status -

-
- -

Miner Idle

-

Ready to accept mining jobs

- -
-
- - -
-

- - GPU Services Available -

-
-
-
-

GPU Computing

-

CUDA cores ready

-
- Available -
-
-
-

Hash Generation

-

Proof-of-work capable

-
- Available -
-
-
-

AI Model Training

-

ML operations ready

-
- Available -
-
-
-
- - -
-

System Information

-
-
-

Host System

-

Loading...

-
-
-

GPU Driver

-

Loading...

-
-
-

CUDA Version

-

Loading...

-
-
-
-
- - - - -EOF - -# Create Python server with API -cat > server.py << 'EOF' -import json -import subprocess -import socket -from http.server import HTTPServer, BaseHTTPRequestHandler -from urllib.parse import urlparse - -class MinerHandler(BaseHTTPRequestHandler): - def do_GET(self): - if self.path == '/api/gpu': - self.send_json(self.get_gpu_info()) - elif self.path == '/' or self.path == '/index.html': - self.serve_file('index.html') - else: - self.send_error(404) - - def get_gpu_info(self): - try: - # Get GPU info - result = subprocess.run(['nvidia-smi', '--query-gpu=name,utilization.gpu,temperature.gpu,power.draw,memory.used,memory.total,driver_version,cuda_version', '--format=csv,noheader,nounits'], - capture_output=True, text=True) - - if result.returncode == 0: - values = result.stdout.strip().split(', ') - return { - 'name': values[0], - 'utilization': int(values[1]), - 'temperature': int(values[2]), - 'power': float(values[3]), - 'memory_used': float(values[4]) / 1024, - 'memory_total': float(values[5]) / 1024, - 'driver_version': values[6], - 'cuda_version': values[7], - 'hostname': socket.gethostname(), - 'performance_state': 'P8' # Would need additional query - } - except Exception as e: - return {'error': str(e)} - - def send_json(self, data): - self.send_response(200) - self.send_header('Content-Type', 'application/json') - self.end_headers() - self.wfile.write(json.dumps(data).encode()) - - def serve_file(self, filename): - try: - with open(filename, 'r') as f: - self.send_response(200) - self.send_header('Content-Type', 'text/html') - self.end_headers() - self.wfile.write(f.read().encode()) - except FileNotFoundError: - self.send_error(404) - -if __name__ == '__main__': - server = HTTPServer(('0.0.0.0', 8080), MinerHandler) - print(''' -╔═══════════════════════════════════════╗ -║ AITBC Miner Dashboard ║ -║ Running on HOST with GPU access ║ -╠═══════════════════════════════════════╣ -║ Dashboard: http://localhost:8080 ║ -║ Host: $(hostname) ║ -║ GPU: 
$(nvidia-smi --query-gpu=name --format=csv,noheader) ║ -╚═══════════════════════════════════════╝ -''') - server.serve_forever() -EOF - -# Make server executable -chmod +x server.py - -echo "" -echo "✅ Dashboard created!" -echo "" -echo "To start the dashboard:" -echo " cd ~/miner-dashboard" -echo " python3 server.py" -echo "" -echo "Then access at: http://localhost:8080" -echo "" -echo "To auto-start on boot, add to crontab:" -echo " @reboot cd ~/miner-dashboard && python3 server.py &" diff --git a/apps/miner-dashboard/host_only_setup.sh b/apps/miner-dashboard/host_only_setup.sh deleted file mode 100644 index fd40be9c..00000000 --- a/apps/miner-dashboard/host_only_setup.sh +++ /dev/null @@ -1,189 +0,0 @@ -#!/bin/bash - -echo "=== AITBC Miner Dashboard - Host Setup ===" -echo "" -echo "This script sets up the dashboard on the HOST machine (at1)" -echo "NOT in the container (aitbc)" -echo "" - -# Check if we have GPU access -if ! command -v nvidia-smi &> /dev/null; then - echo "❌ ERROR: nvidia-smi not found!" - echo "This script must be run on the HOST with GPU access" - exit 1 -fi - -echo "✅ GPU detected: $(nvidia-smi --query-gpu=name --format=csv,noheader)" - -# Create dashboard directory -mkdir -p ~/miner-dashboard -cd ~/miner-dashboard - -# Create HTML dashboard -cat > index.html << 'HTML' - - - - AITBC GPU Miner Dashboard - HOST - - - - -
-
-
-
- -
-

AITBC GPU Miner Dashboard

-

Running on HOST with direct GPU access

-
-
-
- - GPU Connected -
-
-
- -
-

GPU Status Monitor

-
-
- -

Utilization

-

0%

-
-
- -

Temperature

-

--°C

-
-
- -

Power

-

--W

-
-
- -

Memory

-

--GB

-
-
-
- -
-
-

- - Mining Operations -

-
-
-
- Status - Idle -
-

Miner is ready to accept jobs

-
-
-
- Hash Rate - 0 MH/s -
-
-
-
-
-
-
- -
-

- - GPU Services -

-
-
- CUDA Computing - Active -
-
- Parallel Processing - Active -
-
- Hash Generation - Standby -
-
- AI Model Training - Available -
-
-
-
- -
-

System Information

-
-
-

Location

-

HOST System

-
-
-

GPU Access

-

Direct

-
-
-

Container

-

Not Used

-
-
-
-
- - - - -HTML - -# Create simple server -cat > serve.sh << 'EOF' -#!/bin/bash -cd ~/miner-dashboard -echo "Starting GPU Miner Dashboard on HOST..." -echo "Access at: http://localhost:8080" -echo "Press Ctrl+C to stop" -python3 -m http.server 8080 --bind 0.0.0.0 -EOF - -chmod +x serve.sh - -echo "" -echo "✅ Dashboard created on HOST!" -echo "" -echo "To run the dashboard:" -echo " ~/miner-dashboard/serve.sh" -echo "" -echo "Dashboard will be available at:" -echo " - Local: http://localhost:8080" -echo " - Network: http://$(hostname -I | awk '{print $1}'):8080" diff --git a/apps/miner-dashboard/index.html b/apps/miner-dashboard/index.html deleted file mode 100644 index 64d2f06c..00000000 --- a/apps/miner-dashboard/index.html +++ /dev/null @@ -1,449 +0,0 @@ - - - - - - AITBC Miner Dashboard - - - - - - - -
-
-
-
- -
-

AITBC Miner Dashboard

-

GPU Mining Operations Monitor

-
-
-
- - - Connected - - -
-
-
-
- - -
- -
-
-
-

NVIDIA GeForce RTX 4060 Ti

-

GPU Status & Performance

-
-
-
0%
-
GPU Utilization
-
-
- -
-
-
-
-

Temperature

-

43°C

-
- -
-
-
-
-
-

Power Usage

-

18W

-
- -
-
-
-
-
-

Memory Used

-

2.9GB

-
- -
-
-
-
-
-

Performance

-

P8

-
- -
-
-
-
- - -
- -
-

- - Active Mining Jobs -

-
-
-
-
-

Matrix Computation

-

Job ID: #12345

-
-
-

85%

-

Complete

-
-
-
-
-
-
-
-
-
-

Hash Validation

-

Job ID: #12346

-
-
-

42%

-

Complete

-
-
-
-
-
-
-
-
- - -
-

- - Available Services -

-
-
-
-

GPU Computing

-

CUDA cores available for computation

-
- Active -
-
-
-

Parallel Processing

-

Multi-threaded job execution

-
- Active -
-
-
-

Hash Generation

-

Proof-of-work computation

-
- Standby -
-
-
-

AI Model Training

-

Machine learning operations

-
- Available -
-
-
-
- - -
- -
-

GPU Utilization (Last Hour)

- -
- - -
-

Hash Rate Performance

- -
-
- - -
-
-

Total Jobs Completed

-

0

-
-
-

Average Job Time

-

0s

-
-
-

Success Rate

-

0%

-
-
-

Hash Rate

-

0 MH/s

-
-
- - -
-

Service Capabilities

-
- -
-
-
- - - - diff --git a/apps/miner-dashboard/miner_service.py b/apps/miner-dashboard/miner_service.py deleted file mode 100644 index 233ff31e..00000000 --- a/apps/miner-dashboard/miner_service.py +++ /dev/null @@ -1,181 +0,0 @@ -#!/usr/bin/env python3 -"""AITBC GPU Mining Service""" - -import subprocess -import time -import json -import random -from datetime import datetime -import threading - -class AITBCMiner: - def __init__(self): - self.running = False - self.jobs = [] - self.stats = { - 'total_jobs': 0, - 'completed_jobs': 0, - 'failed_jobs': 0, - 'hash_rate': 0, - 'uptime': 0 - } - self.start_time = None - - def start_mining(self): - """Start the mining service""" - self.running = True - self.start_time = time.time() - print("🚀 AITBC Miner started") - - # Start mining threads - mining_thread = threading.Thread(target=self._mining_loop) - mining_thread.daemon = True - mining_thread.start() - - # Start status monitoring - monitor_thread = threading.Thread(target=self._monitor_gpu) - monitor_thread.daemon = True - monitor_thread.start() - - def stop_mining(self): - """Stop the mining service""" - self.running = False - print("⛔ AITBC Miner stopped") - - def _mining_loop(self): - """Main mining loop""" - while self.running: - # Simulate job processing - if random.random() > 0.7: # 30% chance of new job - job = self._create_job() - self.jobs.append(job) - self._process_job(job) - - time.sleep(1) - - def _create_job(self): - """Create a new mining job""" - job_types = [ - 'Matrix Computation', - 'Hash Validation', - 'Block Verification', - 'Transaction Processing', - 'AI Model Training' - ] - - job = { - 'id': f"job_{int(time.time())}_{random.randint(1000, 9999)}", - 'name': random.choice(job_types), - 'progress': 0, - 'status': 'running', - 'created_at': datetime.now().isoformat() - } - - self.stats['total_jobs'] += 1 - return job - - def _process_job(self, job): - """Process a mining job""" - processing_thread = threading.Thread(target=self._process_job_thread, 
args=(job,)) - processing_thread.daemon = True - processing_thread.start() - - def _process_job_thread(self, job): - """Process job in separate thread""" - duration = random.randint(5, 30) - steps = 20 - - for i in range(steps + 1): - if not self.running: - break - - job['progress'] = int((i / steps) * 100) - time.sleep(duration / steps) - - if self.running: - job['status'] = 'completed' if random.random() > 0.05 else 'failed' - job['completed_at'] = datetime.now().isoformat() - - if job['status'] == 'completed': - self.stats['completed_jobs'] += 1 - else: - self.stats['failed_jobs'] += 1 - - def _monitor_gpu(self): - """Monitor GPU status""" - while self.running: - try: - # Get GPU utilization - result = subprocess.run(['nvidia-smi', '--query-gpu=utilization.gpu', '--format=csv,noheader,nounits'], - capture_output=True, text=True) - - if result.returncode == 0: - gpu_util = int(result.stdout.strip()) - # Simulate hash rate based on GPU utilization - self.stats['hash_rate'] = round(gpu_util * 0.5 + random.uniform(-5, 5), 1) - - except Exception as e: - print(f"GPU monitoring error: {e}") - self.stats['hash_rate'] = random.uniform(40, 60) - - # Update uptime - if self.start_time: - self.stats['uptime'] = int(time.time() - self.start_time) - - time.sleep(2) - - def get_status(self): - """Get current mining status""" - return { - 'running': self.running, - 'stats': self.stats.copy(), - 'active_jobs': [j for j in self.jobs if j['status'] == 'running'], - 'gpu_info': self._get_gpu_info() - } - - def _get_gpu_info(self): - """Get GPU information""" - try: - result = subprocess.run(['nvidia-smi', '--query-gpu=name,utilization.gpu,temperature.gpu,power.draw,memory.used,memory.total', - '--format=csv,noheader,nounits'], - capture_output=True, text=True) - - if result.returncode == 0: - values = result.stdout.strip().split(', ') - return { - 'name': values[0], - 'utilization': int(values[1]), - 'temperature': int(values[2]), - 'power': float(values[3]), - 'memory_used': 
float(values[4]), - 'memory_total': float(values[5]) - } - except: - pass - - return { - 'name': 'NVIDIA GeForce RTX 4060 Ti', - 'utilization': 0, - 'temperature': 43, - 'power': 18, - 'memory_used': 2902, - 'memory_total': 16380 - } - -# Global miner instance -miner = AITBCMiner() - -if __name__ == "__main__": - print("AITBC GPU Mining Service") - print("=" * 40) - - try: - miner.start_mining() - - # Keep running - while True: - time.sleep(10) - - except KeyboardInterrupt: - print("\nShutting down...") - miner.stop_mining() diff --git a/apps/miner-dashboard/quick_deploy.sh b/apps/miner-dashboard/quick_deploy.sh deleted file mode 100644 index aa68253a..00000000 --- a/apps/miner-dashboard/quick_deploy.sh +++ /dev/null @@ -1,180 +0,0 @@ -#!/bin/bash - -echo "=== Quick AITBC Miner Dashboard Setup ===" - -# Create directory -sudo mkdir -p /opt/aitbc-miner-dashboard - -# Create simple dashboard -cat > /opt/aitbc-miner-dashboard/index.html << 'HTML' - - - - AITBC Miner Dashboard - - - - -
-
-

- - AITBC Miner Dashboard -

-
- - GPU Connected -
-
- -
-

NVIDIA GeForce RTX 4060 Ti

-
-
-

Utilization

-

0%

-
-
-

Temperature

-

43°C

-
-
-

Power

-

18W

-
-
-

Memory

-

2.9GB

-
-
-
- -
-
-

- - Mining Jobs -

-
- -

No active jobs

-

Miner is ready to receive jobs

-
-
- -
-

- - Available Services -

-
-
- GPU Computing - Active -
-
- Parallel Processing - Active -
-
- Hash Generation - Standby -
-
- AI Model Training - Available -
-
-
-
- -
-

Mining Statistics

-
-
-

0

-

Jobs Completed

-
-
-

0s

-

Avg Job Time

-
-
-

100%

-

Success Rate

-
-
-

0 MH/s

-

Hash Rate

-
-
-
-
- - - - -HTML - -# Create simple Python server -cat > /opt/aitbc-miner-dashboard/serve.py << 'PY' -import http.server -import socketserver -import os - -PORT = 8080 -os.chdir('/opt/aitbc-miner-dashboard') - -Handler = http.server.SimpleHTTPRequestHandler -with socketserver.TCPServer(("", PORT), Handler) as httpd: - print(f"Dashboard running at http://localhost:{PORT}") - httpd.serve_forever() -PY - -# Create systemd service -cat > /etc/systemd/system/aitbc-miner-dashboard.service << 'EOF' -[Unit] -Description=AITBC Miner Dashboard -After=network.target - -[Service] -Type=simple -User=root -WorkingDirectory=/opt/aitbc-miner-dashboard -ExecStart=/usr/bin/python3 serve.py -Restart=always - -[Install] -WantedBy=multi-user.target -EOF - -# Start service -systemctl daemon-reload -systemctl enable aitbc-miner-dashboard -systemctl start aitbc-miner-dashboard - -echo "" -echo "✅ Dashboard deployed!" -echo "Access at: http://localhost:8080" -echo "Check status: systemctl status aitbc-miner-dashboard" diff --git a/apps/miner-dashboard/setup.sh b/apps/miner-dashboard/setup.sh deleted file mode 100644 index 55ba6183..00000000 --- a/apps/miner-dashboard/setup.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash - -echo "=== AITBC Miner Dashboard Setup ===" -echo "" - -# Create directory -sudo mkdir -p /opt/aitbc-miner-dashboard -sudo cp -r /home/oib/windsurf/aitbc/apps/miner-dashboard/* /opt/aitbc-miner-dashboard/ - -# Create virtual environment -cd /opt/aitbc-miner-dashboard -sudo python3 -m venv .venv -sudo .venv/bin/pip install psutil - -# Install systemd service -sudo cp aitbc-miner-dashboard.service /etc/systemd/system/ -sudo systemctl daemon-reload -sudo systemctl enable aitbc-miner-dashboard -sudo systemctl start aitbc-miner-dashboard - -# Wait for service to start -sleep 3 - -# Check status -sudo systemctl status aitbc-miner-dashboard --no-pager -l | head -10 - -echo "" -echo "✅ Miner Dashboard is running at: http://localhost:8080" -echo "" -echo "To access from other 
machines, use: http://$(hostname -I | awk '{print $1}'):8080" diff --git a/apps/miner-node/README.md b/apps/miner-node/README.md deleted file mode 100644 index deecb647..00000000 --- a/apps/miner-node/README.md +++ /dev/null @@ -1,27 +0,0 @@ -# Miner Node - -## Purpose & Scope - -Worker daemon responsible for executing compute jobs on CPU/GPU hardware, reporting telemetry, and submitting proofs back to the coordinator. See `docs/bootstrap/miner_node.md` for the detailed implementation roadmap. - -## Development Setup - -- Create a Python virtual environment under `apps/miner-node/.venv`. -- Install dependencies (FastAPI optional for health endpoint, `httpx`, `pydantic`, `psutil`). -- Implement the package structure described in the bootstrap guide. - -## Production Deployment (systemd) - -1. Copy the project to `/opt/aitbc/apps/miner-node/` on the target host. -2. Create a virtual environment and install dependencies as needed. -3. Populate `.env` with coordinator URL/API token settings. -4. Run the installer script from repo root: - ```bash - sudo scripts/ops/install_miner_systemd.sh - ``` - This installs `configs/systemd/aitbc-miner.service`, reloads systemd, and enables the service. -5. 
Check status/logs: - ```bash - sudo systemctl status aitbc-miner - journalctl -u aitbc-miner -f - ``` diff --git a/apps/miner-node/plugins/__init__.py b/apps/miner-node/plugins/__init__.py deleted file mode 100644 index c07c9b05..00000000 --- a/apps/miner-node/plugins/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -""" -Miner plugin system for GPU service execution -""" - -from .base import ServicePlugin, PluginResult -from .registry import PluginRegistry -from .exceptions import PluginError, PluginNotFoundError - -__all__ = [ - "ServicePlugin", - "PluginResult", - "PluginRegistry", - "PluginError", - "PluginNotFoundError" -] diff --git a/apps/miner-node/plugins/base.py b/apps/miner-node/plugins/base.py deleted file mode 100644 index d604d15e..00000000 --- a/apps/miner-node/plugins/base.py +++ /dev/null @@ -1,111 +0,0 @@ -""" -Base plugin interface for GPU service execution -""" - -from abc import ABC, abstractmethod -from typing import Dict, Any, Optional, List -from dataclasses import dataclass -from datetime import datetime -import asyncio - - -@dataclass -class PluginResult: - """Result from plugin execution""" - success: bool - data: Optional[Dict[str, Any]] = None - error: Optional[str] = None - metrics: Optional[Dict[str, Any]] = None - execution_time: Optional[float] = None - - -class ServicePlugin(ABC): - """Base class for all service plugins""" - - def __init__(self): - self.service_id = None - self.name = None - self.version = "1.0.0" - self.description = "" - self.capabilities = [] - - @abstractmethod - async def execute(self, request: Dict[str, Any]) -> PluginResult: - """Execute the service with given parameters""" - pass - - @abstractmethod - def validate_request(self, request: Dict[str, Any]) -> List[str]: - """Validate request parameters, return list of errors""" - pass - - @abstractmethod - def get_hardware_requirements(self) -> Dict[str, Any]: - """Get hardware requirements for this plugin""" - pass - - def get_metrics(self) -> Dict[str, Any]: - 
"""Get plugin-specific metrics""" - return { - "service_id": self.service_id, - "name": self.name, - "version": self.version - } - - async def health_check(self) -> bool: - """Check if plugin dependencies are available""" - return True - - def setup(self) -> None: - """Initialize plugin resources""" - pass - - def cleanup(self) -> None: - """Cleanup plugin resources""" - pass - - -class GPUPlugin(ServicePlugin): - """Base class for GPU-accelerated plugins""" - - def __init__(self): - super().__init__() - self.gpu_available = False - self.vram_gb = 0 - self.cuda_available = False - - def setup(self) -> None: - """Check GPU availability""" - self._detect_gpu() - - def _detect_gpu(self) -> None: - """Detect GPU and VRAM""" - try: - import torch - if torch.cuda.is_available(): - self.gpu_available = True - self.cuda_available = True - self.vram_gb = torch.cuda.get_device_properties(0).total_memory / (1024**3) - except ImportError: - pass - - try: - import GPUtil - gpus = GPUtil.getGPUs() - if gpus: - self.gpu_available = True - self.vram_gb = gpus[0].memoryTotal / 1024 - except ImportError: - pass - - def get_hardware_requirements(self) -> Dict[str, Any]: - """Default GPU requirements""" - return { - "gpu": "any", - "vram_gb": 4, - "cuda": "recommended" - } - - async def health_check(self) -> bool: - """Check GPU health""" - return self.gpu_available diff --git a/apps/miner-node/plugins/blender.py b/apps/miner-node/plugins/blender.py deleted file mode 100644 index f0eef7bf..00000000 --- a/apps/miner-node/plugins/blender.py +++ /dev/null @@ -1,371 +0,0 @@ -""" -Blender 3D rendering plugin -""" - -import asyncio -import os -import subprocess -import tempfile -import json -from typing import Dict, Any, List, Optional -import time - -from .base import GPUPlugin, PluginResult -from .exceptions import PluginExecutionError - - -class BlenderPlugin(GPUPlugin): - """Plugin for Blender 3D rendering""" - - def __init__(self): - super().__init__() - self.service_id = "blender" - 
self.name = "Blender Rendering" - self.version = "1.0.0" - self.description = "Render 3D scenes using Blender" - self.capabilities = ["render", "animation", "cycles", "eevee"] - - def setup(self) -> None: - """Initialize Blender dependencies""" - super().setup() - - # Check for Blender installation - try: - result = subprocess.run( - ["blender", "--version"], - capture_output=True, - text=True, - check=True - ) - self.blender_path = "blender" - except (subprocess.CalledProcessError, FileNotFoundError): - raise PluginExecutionError("Blender not found. Install Blender for 3D rendering") - - # Check for bpy module (Python API) - try: - import bpy - self.bpy_available = True - except ImportError: - self.bpy_available = False - print("Warning: bpy module not available. Some features may be limited.") - - def validate_request(self, request: Dict[str, Any]) -> List[str]: - """Validate Blender request parameters""" - errors = [] - - # Check required parameters - if "blend_file" not in request and "scene_data" not in request: - errors.append("Either 'blend_file' or 'scene_data' must be provided") - - # Validate engine - engine = request.get("engine", "cycles") - valid_engines = ["cycles", "eevee", "workbench"] - if engine not in valid_engines: - errors.append(f"Invalid engine. 
Must be one of: {', '.join(valid_engines)}") - - # Validate resolution - resolution_x = request.get("resolution_x", 1920) - resolution_y = request.get("resolution_y", 1080) - - if not isinstance(resolution_x, int) or resolution_x < 1 or resolution_x > 65536: - errors.append("resolution_x must be an integer between 1 and 65536") - if not isinstance(resolution_y, int) or resolution_y < 1 or resolution_y > 65536: - errors.append("resolution_y must be an integer between 1 and 65536") - - # Validate samples - samples = request.get("samples", 128) - if not isinstance(samples, int) or samples < 1 or samples > 10000: - errors.append("samples must be an integer between 1 and 10000") - - # Validate frame range for animation - if request.get("animation", False): - frame_start = request.get("frame_start", 1) - frame_end = request.get("frame_end", 250) - - if not isinstance(frame_start, int) or frame_start < 1: - errors.append("frame_start must be >= 1") - if not isinstance(frame_end, int) or frame_end < frame_start: - errors.append("frame_end must be >= frame_start") - - return errors - - def get_hardware_requirements(self) -> Dict[str, Any]: - """Get hardware requirements for Blender""" - return { - "gpu": "recommended", - "vram_gb": 4, - "ram_gb": 16, - "cuda": "recommended" - } - - async def execute(self, request: Dict[str, Any]) -> PluginResult: - """Execute Blender rendering""" - start_time = time.time() - - try: - # Validate request - errors = self.validate_request(request) - if errors: - return PluginResult( - success=False, - error=f"Validation failed: {'; '.join(errors)}" - ) - - # Get parameters - blend_file = request.get("blend_file") - scene_data = request.get("scene_data") - engine = request.get("engine", "cycles") - resolution_x = request.get("resolution_x", 1920) - resolution_y = request.get("resolution_y", 1080) - samples = request.get("samples", 128) - animation = request.get("animation", False) - frame_start = request.get("frame_start", 1) - frame_end = 
request.get("frame_end", 250) - output_format = request.get("output_format", "png") - gpu_acceleration = request.get("gpu_acceleration", self.gpu_available) - - # Prepare input file - input_file = await self._prepare_input_file(blend_file, scene_data) - - # Build Blender command - cmd = self._build_blender_command( - input_file=input_file, - engine=engine, - resolution_x=resolution_x, - resolution_y=resolution_y, - samples=samples, - animation=animation, - frame_start=frame_start, - frame_end=frame_end, - output_format=output_format, - gpu_acceleration=gpu_acceleration - ) - - # Execute Blender - output_files = await self._execute_blender(cmd, animation, frame_start, frame_end) - - # Get render statistics - render_stats = await self._get_render_stats(output_files[0] if output_files else None) - - # Clean up input file if created from scene data - if scene_data: - os.unlink(input_file) - - execution_time = time.time() - start_time - - return PluginResult( - success=True, - data={ - "output_files": output_files, - "count": len(output_files), - "animation": animation, - "parameters": { - "engine": engine, - "resolution": f"{resolution_x}x{resolution_y}", - "samples": samples, - "gpu_acceleration": gpu_acceleration - } - }, - metrics={ - "engine": engine, - "frames_rendered": len(output_files), - "render_time": execution_time, - "time_per_frame": execution_time / len(output_files) if output_files else 0, - "samples_per_second": (samples * len(output_files)) / execution_time if execution_time > 0 else 0, - "render_stats": render_stats - }, - execution_time=execution_time - ) - - except Exception as e: - return PluginResult( - success=False, - error=str(e), - execution_time=time.time() - start_time - ) - - async def _prepare_input_file(self, blend_file: Optional[str], scene_data: Optional[Dict]) -> str: - """Prepare input .blend file""" - if blend_file: - # Use provided file - if not os.path.exists(blend_file): - raise PluginExecutionError(f"Blend file not found: 
{blend_file}") - return blend_file - elif scene_data: - # Create blend file from scene data - if not self.bpy_available: - raise PluginExecutionError("Cannot create scene without bpy module") - - # Create a temporary Python script to generate the scene - script = tempfile.mktemp(suffix=".py") - output_blend = tempfile.mktemp(suffix=".blend") - - with open(script, "w") as f: - f.write(f""" -import bpy -import json - -# Load scene data -scene_data = json.loads('''{json.dumps(scene_data)}''') - -# Clear default scene -bpy.ops.object.select_all(action='SELECT') -bpy.ops.object.delete() - -# Create scene from data -# This is a simplified example - in practice, you'd parse the scene_data -# and create appropriate objects, materials, lights, etc. - -# Save blend file -bpy.ops.wm.save_as_mainfile(filepath='{output_blend}') -""") - - # Run Blender to create the scene - cmd = [self.blender_path, "--background", "--python", script] - process = await asyncio.create_subprocess_exec(*cmd) - await process.communicate() - - # Clean up script - os.unlink(script) - - return output_blend - else: - raise PluginExecutionError("Either blend_file or scene_data must be provided") - - def _build_blender_command( - self, - input_file: str, - engine: str, - resolution_x: int, - resolution_y: int, - samples: int, - animation: bool, - frame_start: int, - frame_end: int, - output_format: str, - gpu_acceleration: bool - ) -> List[str]: - """Build Blender command""" - cmd = [ - self.blender_path, - "--background", - input_file, - "--render-engine", engine, - "--render-format", output_format.upper() - ] - - # Add Python script for settings - script = tempfile.mktemp(suffix=".py") - with open(script, "w") as f: - f.write(f""" -import bpy - -# Set resolution -bpy.context.scene.render.resolution_x = {resolution_x} -bpy.context.scene.render.resolution_y = {resolution_y} - -# Set samples for Cycles -if bpy.context.scene.render.engine == 'CYCLES': - bpy.context.scene.cycles.samples = {samples} - - # 
Enable GPU rendering if available - if {str(gpu_acceleration).lower()}: - bpy.context.scene.cycles.device = 'GPU' - preferences = bpy.context.preferences - cycles_preferences = preferences.addons['cycles'].preferences - cycles_preferences.compute_device_type = 'CUDA' - cycles_preferences.get_devices() - for device in cycles_preferences.devices: - device.use = True - -# Set frame range for animation -if {str(animation).lower()}: - bpy.context.scene.frame_start = {frame_start} - bpy.context.scene.frame_end = {frame_end} - -# Set output path -bpy.context.scene.render.filepath = '{tempfile.mkdtemp()}/render_' - -# Save settings -bpy.ops.wm.save_mainfile() -""") - - cmd.extend(["--python", script]) - - # Add render command - if animation: - cmd.extend(["-a"]) # Render animation - else: - cmd.extend(["-f", "1"]) # Render single frame - - return cmd - - async def _execute_blender( - self, - cmd: List[str], - animation: bool, - frame_start: int, - frame_end: int - ) -> List[str]: - """Execute Blender command""" - process = await asyncio.create_subprocess_exec( - *cmd, - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.PIPE - ) - - stdout, stderr = await process.communicate() - - if process.returncode != 0: - error_msg = stderr.decode() if stderr else "Blender failed" - raise PluginExecutionError(f"Blender error: {error_msg}") - - # Find output files - output_dir = tempfile.mkdtemp() - output_pattern = os.path.join(output_dir, "render_*") - - if animation: - # Animation creates multiple files - import glob - output_files = glob.glob(output_pattern) - output_files.sort() # Ensure frame order - else: - # Single frame - output_files = [glob.glob(output_pattern)[0]] - - return output_files - - async def _get_render_stats(self, output_file: Optional[str]) -> Dict[str, Any]: - """Get render statistics""" - if not output_file or not os.path.exists(output_file): - return {} - - # Get file size and basic info - file_size = os.path.getsize(output_file) - - # Try to get 
image dimensions - try: - from PIL import Image - with Image.open(output_file) as img: - width, height = img.size - except: - width = height = None - - return { - "file_size": file_size, - "width": width, - "height": height, - "format": os.path.splitext(output_file)[1][1:].upper() - } - - async def health_check(self) -> bool: - """Check Blender health""" - try: - result = subprocess.run( - ["blender", "--version"], - capture_output=True, - check=True - ) - return True - except subprocess.CalledProcessError: - return False diff --git a/apps/miner-node/plugins/discovery.py b/apps/miner-node/plugins/discovery.py deleted file mode 100644 index 59595dd9..00000000 --- a/apps/miner-node/plugins/discovery.py +++ /dev/null @@ -1,215 +0,0 @@ -""" -Plugin discovery and matching system -""" - -import asyncio -import logging -from typing import Dict, List, Set, Optional -import requests - -from .registry import registry -from .base import ServicePlugin -from .exceptions import PluginNotFoundError - -logger = logging.getLogger(__name__) - - -class ServiceDiscovery: - """Discovers and matches services to plugins""" - - def __init__(self, pool_hub_url: str, miner_id: str): - self.pool_hub_url = pool_hub_url - self.miner_id = miner_id - self.enabled_services: Set[str] = set() - self.service_configs: Dict[str, Dict] = {} - self._last_update = 0 - self._update_interval = 60 # seconds - - async def start(self) -> None: - """Start the discovery service""" - logger.info("Starting service discovery") - - # Initialize plugin registry - await registry.initialize() - - # Initial sync - await self.sync_services() - - # Start background sync task - asyncio.create_task(self._sync_loop()) - - async def sync_services(self) -> None: - """Sync enabled services from pool-hub""" - try: - # Get service configurations from pool-hub - response = requests.get( - f"{self.pool_hub_url}/v1/services/", - headers={"X-Miner-ID": self.miner_id} - ) - response.raise_for_status() - - services = response.json() - 
- # Update local state - new_enabled = set() - new_configs = {} - - for service in services: - if service.get("enabled", False): - service_id = service["service_type"] - new_enabled.add(service_id) - new_configs[service_id] = service - - # Find changes - added = new_enabled - self.enabled_services - removed = self.enabled_services - new_enabled - updated = set() - - for service_id in self.enabled_services & new_enabled: - if new_configs[service_id] != self.service_configs.get(service_id): - updated.add(service_id) - - # Apply changes - for service_id in removed: - await self._disable_service(service_id) - - for service_id in added: - await self._enable_service(service_id, new_configs[service_id]) - - for service_id in updated: - await self._update_service(service_id, new_configs[service_id]) - - # Update state - self.enabled_services = new_enabled - self.service_configs = new_configs - self._last_update = asyncio.get_event_loop().time() - - logger.info(f"Synced services: {len(self.enabled_services)} enabled") - - except Exception as e: - logger.error(f"Failed to sync services: {e}") - - async def _enable_service(self, service_id: str, config: Dict) -> None: - """Enable a service""" - try: - # Check if plugin exists - if service_id not in registry.list_plugins(): - logger.warning(f"No plugin available for service: {service_id}") - return - - # Load plugin - plugin = registry.load_plugin(service_id) - - # Validate hardware requirements - await self._validate_hardware_requirements(plugin, config) - - # Configure plugin if needed - if hasattr(plugin, 'configure'): - await plugin.configure(config.get('config', {})) - - logger.info(f"Enabled service: {service_id}") - - except Exception as e: - logger.error(f"Failed to enable service {service_id}: {e}") - - async def _disable_service(self, service_id: str) -> None: - """Disable a service""" - try: - # Unload plugin to free resources - registry.unload_plugin(service_id) - logger.info(f"Disabled service: {service_id}") - - 
except Exception as e: - logger.error(f"Failed to disable service {service_id}: {e}") - - async def _update_service(self, service_id: str, config: Dict) -> None: - """Update service configuration""" - # For now, just disable and re-enable - await self._disable_service(service_id) - await self._enable_service(service_id, config) - - async def _validate_hardware_requirements(self, plugin: ServicePlugin, config: Dict) -> None: - """Validate that miner meets plugin requirements""" - requirements = plugin.get_hardware_requirements() - - # This would check against actual miner hardware - # For now, just log the requirements - logger.debug(f"Hardware requirements for {plugin.service_id}: {requirements}") - - async def _sync_loop(self) -> None: - """Background sync loop""" - while True: - await asyncio.sleep(self._update_interval) - await self.sync_services() - - async def execute_service(self, service_id: str, request: Dict) -> Dict: - """Execute a service request""" - try: - # Check if service is enabled - if service_id not in self.enabled_services: - raise PluginNotFoundError(f"Service {service_id} is not enabled") - - # Get plugin - plugin = registry.get_plugin(service_id) - if not plugin: - raise PluginNotFoundError(f"No plugin loaded for service: {service_id}") - - # Execute request - result = await plugin.execute(request) - - # Convert result to dict - return { - "success": result.success, - "data": result.data, - "error": result.error, - "metrics": result.metrics, - "execution_time": result.execution_time - } - - except Exception as e: - logger.error(f"Failed to execute service {service_id}: {e}") - return { - "success": False, - "error": str(e) - } - - def get_enabled_services(self) -> List[str]: - """Get list of enabled services""" - return list(self.enabled_services) - - def get_service_status(self) -> Dict[str, Dict]: - """Get status of all services""" - status = {} - - for service_id in registry.list_plugins(): - plugin = registry.get_plugin(service_id) - 
status[service_id] = { - "enabled": service_id in self.enabled_services, - "loaded": plugin is not None, - "config": self.service_configs.get(service_id, {}), - "capabilities": plugin.capabilities if plugin else [] - } - - return status - - async def health_check(self) -> Dict[str, bool]: - """Health check all enabled services""" - results = {} - - for service_id in self.enabled_services: - plugin = registry.get_plugin(service_id) - if plugin: - try: - results[service_id] = await plugin.health_check() - except Exception as e: - logger.error(f"Health check failed for {service_id}: {e}") - results[service_id] = False - else: - results[service_id] = False - - return results - - async def stop(self) -> None: - """Stop the discovery service""" - logger.info("Stopping service discovery") - registry.cleanup_all() diff --git a/apps/miner-node/plugins/exceptions.py b/apps/miner-node/plugins/exceptions.py deleted file mode 100644 index 933d14f8..00000000 --- a/apps/miner-node/plugins/exceptions.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -Plugin system exceptions -""" - - -class PluginError(Exception): - """Base exception for plugin errors""" - pass - - -class PluginNotFoundError(PluginError): - """Raised when a plugin is not found""" - pass - - -class PluginValidationError(PluginError): - """Raised when plugin validation fails""" - pass - - -class PluginExecutionError(PluginError): - """Raised when plugin execution fails""" - pass diff --git a/apps/miner-node/plugins/ffmpeg.py b/apps/miner-node/plugins/ffmpeg.py deleted file mode 100644 index df627da6..00000000 --- a/apps/miner-node/plugins/ffmpeg.py +++ /dev/null @@ -1,318 +0,0 @@ -""" -FFmpeg video processing plugin -""" - -import asyncio -import os -import subprocess -import tempfile -from typing import Dict, Any, List -import time - -from .base import ServicePlugin, PluginResult -from .exceptions import PluginExecutionError - - -class FFmpegPlugin(ServicePlugin): - """Plugin for FFmpeg video processing""" - - def 
__init__(self): - super().__init__() - self.service_id = "ffmpeg" - self.name = "FFmpeg Video Processing" - self.version = "1.0.0" - self.description = "Transcode and process video files using FFmpeg" - self.capabilities = ["transcode", "resize", "compress", "convert"] - - def setup(self) -> None: - """Initialize FFmpeg dependencies""" - # Check for ffmpeg installation - try: - subprocess.run(["ffmpeg", "-version"], capture_output=True, check=True) - self.ffmpeg_path = "ffmpeg" - except (subprocess.CalledProcessError, FileNotFoundError): - raise PluginExecutionError("FFmpeg not found. Install FFmpeg for video processing") - - # Check for NVIDIA GPU support - try: - result = subprocess.run( - ["ffmpeg", "-hide_banner", "-encoders"], - capture_output=True, - text=True, - check=True - ) - self.gpu_acceleration = "h264_nvenc" in result.stdout - except subprocess.CalledProcessError: - self.gpu_acceleration = False - - def validate_request(self, request: Dict[str, Any]) -> List[str]: - """Validate FFmpeg request parameters""" - errors = [] - - # Check required parameters - if "input_url" not in request and "input_file" not in request: - errors.append("Either 'input_url' or 'input_file' must be provided") - - # Validate output format - output_format = request.get("output_format", "mp4") - valid_formats = ["mp4", "avi", "mov", "mkv", "webm", "flv"] - if output_format not in valid_formats: - errors.append(f"Invalid output format. Must be one of: {', '.join(valid_formats)}") - - # Validate codec - codec = request.get("codec", "h264") - valid_codecs = ["h264", "h265", "vp9", "av1", "mpeg4"] - if codec not in valid_codecs: - errors.append(f"Invalid codec. Must be one of: {', '.join(valid_codecs)}") - - # Validate resolution - resolution = request.get("resolution") - if resolution: - valid_resolutions = ["720p", "1080p", "1440p", "4K", "8K"] - if resolution not in valid_resolutions: - errors.append(f"Invalid resolution. 
Must be one of: {', '.join(valid_resolutions)}") - - # Validate bitrate - bitrate = request.get("bitrate") - if bitrate: - if not isinstance(bitrate, str) or not bitrate.endswith(("k", "M")): - errors.append("Bitrate must end with 'k' or 'M' (e.g., '1000k', '5M')") - - # Validate frame rate - fps = request.get("fps") - if fps: - if not isinstance(fps, (int, float)) or fps < 1 or fps > 120: - errors.append("FPS must be between 1 and 120") - - return errors - - def get_hardware_requirements(self) -> Dict[str, Any]: - """Get hardware requirements for FFmpeg""" - return { - "gpu": "optional", - "vram_gb": 2, - "ram_gb": 8, - "storage_gb": 10 - } - - async def execute(self, request: Dict[str, Any]) -> PluginResult: - """Execute FFmpeg processing""" - start_time = time.time() - - try: - # Validate request - errors = self.validate_request(request) - if errors: - return PluginResult( - success=False, - error=f"Validation failed: {'; '.join(errors)}" - ) - - # Get parameters - input_source = request.get("input_url") or request.get("input_file") - output_format = request.get("output_format", "mp4") - codec = request.get("codec", "h264") - resolution = request.get("resolution") - bitrate = request.get("bitrate") - fps = request.get("fps") - gpu_acceleration = request.get("gpu_acceleration", self.gpu_acceleration) - - # Get input file - input_file = await self._get_input_file(input_source) - - # Build FFmpeg command - cmd = self._build_ffmpeg_command( - input_file=input_file, - output_format=output_format, - codec=codec, - resolution=resolution, - bitrate=bitrate, - fps=fps, - gpu_acceleration=gpu_acceleration - ) - - # Execute FFmpeg - output_file = await self._execute_ffmpeg(cmd) - - # Get output file info - output_info = await self._get_video_info(output_file) - - # Clean up input file if downloaded - if input_source != request.get("input_file"): - os.unlink(input_file) - - execution_time = time.time() - start_time - - return PluginResult( - success=True, - data={ - 
"output_file": output_file, - "output_info": output_info, - "parameters": { - "codec": codec, - "resolution": resolution, - "bitrate": bitrate, - "fps": fps, - "gpu_acceleration": gpu_acceleration - } - }, - metrics={ - "input_size": os.path.getsize(input_file), - "output_size": os.path.getsize(output_file), - "compression_ratio": os.path.getsize(output_file) / os.path.getsize(input_file), - "processing_time": execution_time, - "real_time_factor": output_info.get("duration", 0) / execution_time if execution_time > 0 else 0 - }, - execution_time=execution_time - ) - - except Exception as e: - return PluginResult( - success=False, - error=str(e), - execution_time=time.time() - start_time - ) - - async def _get_input_file(self, source: str) -> str: - """Get input file from URL or path""" - if source.startswith(("http://", "https://")): - # Download from URL - import requests - - response = requests.get(source, stream=True) - response.raise_for_status() - - # Save to temporary file - with tempfile.NamedTemporaryFile(delete=False, suffix=".mp4") as f: - for chunk in response.iter_content(chunk_size=8192): - f.write(chunk) - return f.name - else: - # Local file - if not os.path.exists(source): - raise PluginExecutionError(f"Input file not found: {source}") - return source - - def _build_ffmpeg_command( - self, - input_file: str, - output_format: str, - codec: str, - resolution: Optional[str], - bitrate: Optional[str], - fps: Optional[float], - gpu_acceleration: bool - ) -> List[str]: - """Build FFmpeg command""" - cmd = [self.ffmpeg_path, "-i", input_file] - - # Add codec - if gpu_acceleration and codec == "h264": - cmd.extend(["-c:v", "h264_nvenc"]) - cmd.extend(["-preset", "fast"]) - elif gpu_acceleration and codec == "h265": - cmd.extend(["-c:v", "hevc_nvenc"]) - cmd.extend(["-preset", "fast"]) - else: - cmd.extend(["-c:v", codec]) - - # Add resolution - if resolution: - resolution_map = { - "720p": ("1280", "720"), - "1080p": ("1920", "1080"), - "1440p": ("2560", 
"1440"), - "4K": ("3840", "2160"), - "8K": ("7680", "4320") - } - width, height = resolution_map.get(resolution, (None, None)) - if width and height: - cmd.extend(["-s", f"{width}x{height}"]) - - # Add bitrate - if bitrate: - cmd.extend(["-b:v", bitrate]) - cmd.extend(["-b:a", "128k"]) # Audio bitrate - - # Add FPS - if fps: - cmd.extend(["-r", str(fps)]) - - # Add audio codec - cmd.extend(["-c:a", "aac"]) - - # Output file - output_file = tempfile.mktemp(suffix=f".{output_format}") - cmd.append(output_file) - - return cmd - - async def _execute_ffmpeg(self, cmd: List[str]) -> str: - """Execute FFmpeg command""" - process = await asyncio.create_subprocess_exec( - *cmd, - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.PIPE - ) - - stdout, stderr = await process.communicate() - - if process.returncode != 0: - error_msg = stderr.decode() if stderr else "FFmpeg failed" - raise PluginExecutionError(f"FFmpeg error: {error_msg}") - - # Output file is the last argument - return cmd[-1] - - async def _get_video_info(self, video_file: str) -> Dict[str, Any]: - """Get video file information""" - cmd = [ - "ffprobe", - "-v", "quiet", - "-print_format", "json", - "-show_format", - "-show_streams", - video_file - ] - - process = await asyncio.create_subprocess_exec( - *cmd, - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.PIPE - ) - - stdout, stderr = await process.communicate() - - if process.returncode != 0: - return {} - - import json - probe_data = json.loads(stdout.decode()) - - # Extract relevant info - video_stream = next( - (s for s in probe_data.get("streams", []) if s.get("codec_type") == "video"), - {} - ) - - return { - "duration": float(probe_data.get("format", {}).get("duration", 0)), - "size": int(probe_data.get("format", {}).get("size", 0)), - "width": video_stream.get("width"), - "height": video_stream.get("height"), - "fps": eval(video_stream.get("r_frame_rate", "0/1")), - "codec": video_stream.get("codec_name"), - "bitrate": 
int(probe_data.get("format", {}).get("bit_rate", 0)) - } - - async def health_check(self) -> bool: - """Check FFmpeg health""" - try: - result = subprocess.run( - ["ffmpeg", "-version"], - capture_output=True, - check=True - ) - return True - except subprocess.CalledProcessError: - return False diff --git a/apps/miner-node/plugins/llm_inference.py b/apps/miner-node/plugins/llm_inference.py deleted file mode 100644 index a38e0cb0..00000000 --- a/apps/miner-node/plugins/llm_inference.py +++ /dev/null @@ -1,321 +0,0 @@ -""" -LLM inference plugin -""" - -import asyncio -from typing import Dict, Any, List, Optional -import time - -from .base import GPUPlugin, PluginResult -from .exceptions import PluginExecutionError - - -class LLMPlugin(GPUPlugin): - """Plugin for Large Language Model inference""" - - def __init__(self): - super().__init__() - self.service_id = "llm_inference" - self.name = "LLM Inference" - self.version = "1.0.0" - self.description = "Run inference on large language models" - self.capabilities = ["generate", "stream", "chat"] - self._model_cache = {} - - def setup(self) -> None: - """Initialize LLM dependencies""" - super().setup() - - # Check for transformers installation - try: - from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline - self.transformers = AutoModelForCausalLM - self.AutoTokenizer = AutoTokenizer - self.pipeline = pipeline - except ImportError: - raise PluginExecutionError("Transformers not installed. Install with: pip install transformers accelerate") - - # Check for torch - try: - import torch - self.torch = torch - except ImportError: - raise PluginExecutionError("PyTorch not installed. 
Install with: pip install torch") - - def validate_request(self, request: Dict[str, Any]) -> List[str]: - """Validate LLM request parameters""" - errors = [] - - # Check required parameters - if "prompt" not in request: - errors.append("'prompt' is required") - - # Validate model - model = request.get("model", "llama-7b") - valid_models = [ - "llama-7b", - "llama-13b", - "mistral-7b", - "mixtral-8x7b", - "gpt-3.5-turbo", - "gpt-4" - ] - if model not in valid_models: - errors.append(f"Invalid model. Must be one of: {', '.join(valid_models)}") - - # Validate max_tokens - max_tokens = request.get("max_tokens", 256) - if not isinstance(max_tokens, int) or max_tokens < 1 or max_tokens > 4096: - errors.append("max_tokens must be an integer between 1 and 4096") - - # Validate temperature - temperature = request.get("temperature", 0.7) - if not isinstance(temperature, (int, float)) or temperature < 0.0 or temperature > 2.0: - errors.append("temperature must be between 0.0 and 2.0") - - # Validate top_p - top_p = request.get("top_p") - if top_p is not None and (not isinstance(top_p, (int, float)) or top_p <= 0.0 or top_p > 1.0): - errors.append("top_p must be between 0.0 and 1.0") - - return errors - - def get_hardware_requirements(self) -> Dict[str, Any]: - """Get hardware requirements for LLM inference""" - return { - "gpu": "recommended", - "vram_gb": 8, - "ram_gb": 16, - "cuda": "recommended" - } - - async def execute(self, request: Dict[str, Any]) -> PluginResult: - """Execute LLM inference""" - start_time = time.time() - - try: - # Validate request - errors = self.validate_request(request) - if errors: - return PluginResult( - success=False, - error=f"Validation failed: {'; '.join(errors)}" - ) - - # Get parameters - prompt = request["prompt"] - model_name = request.get("model", "llama-7b") - max_tokens = request.get("max_tokens", 256) - temperature = request.get("temperature", 0.7) - top_p = request.get("top_p", 0.9) - do_sample = request.get("do_sample", True) - 
stream = request.get("stream", False) - - # Load model and tokenizer - model, tokenizer = await self._load_model(model_name) - - # Generate response - loop = asyncio.get_event_loop() - - if stream: - # Streaming generation - generator = await loop.run_in_executor( - None, - lambda: self._generate_streaming( - model, tokenizer, prompt, max_tokens, temperature, top_p, do_sample - ) - ) - - # Collect all tokens - full_response = "" - tokens = [] - for token in generator: - tokens.append(token) - full_response += token - - execution_time = time.time() - start_time - - return PluginResult( - success=True, - data={ - "text": full_response, - "tokens": tokens, - "streamed": True - }, - metrics={ - "model": model_name, - "prompt_tokens": len(tokenizer.encode(prompt)), - "generated_tokens": len(tokens), - "tokens_per_second": len(tokens) / execution_time if execution_time > 0 else 0 - }, - execution_time=execution_time - ) - else: - # Regular generation - response = await loop.run_in_executor( - None, - lambda: self._generate( - model, tokenizer, prompt, max_tokens, temperature, top_p, do_sample - ) - ) - - execution_time = time.time() - start_time - - return PluginResult( - success=True, - data={ - "text": response, - "streamed": False - }, - metrics={ - "model": model_name, - "prompt_tokens": len(tokenizer.encode(prompt)), - "generated_tokens": len(tokenizer.encode(response)) - len(tokenizer.encode(prompt)), - "tokens_per_second": (len(tokenizer.encode(response)) - len(tokenizer.encode(prompt))) / execution_time if execution_time > 0 else 0 - }, - execution_time=execution_time - ) - - except Exception as e: - return PluginResult( - success=False, - error=str(e), - execution_time=time.time() - start_time - ) - - async def _load_model(self, model_name: str): - """Load LLM model and tokenizer with caching""" - if model_name not in self._model_cache: - loop = asyncio.get_event_loop() - - # Map model names to HuggingFace model IDs - model_map = { - "llama-7b": 
"meta-llama/Llama-2-7b-chat-hf", - "llama-13b": "meta-llama/Llama-2-13b-chat-hf", - "mistral-7b": "mistralai/Mistral-7B-Instruct-v0.1", - "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1", - "gpt-3.5-turbo": "openai-gpt", # Would need OpenAI API - "gpt-4": "openai-gpt-4" # Would need OpenAI API - } - - hf_model = model_map.get(model_name, model_name) - - # Load tokenizer - tokenizer = await loop.run_in_executor( - None, - lambda: self.AutoTokenizer.from_pretrained(hf_model) - ) - - # Load model - device = "cuda" if self.torch.cuda.is_available() else "cpu" - model = await loop.run_in_executor( - None, - lambda: self.transformers.from_pretrained( - hf_model, - torch_dtype=self.torch.float16 if device == "cuda" else self.torch.float32, - device_map="auto" if device == "cuda" else None, - load_in_4bit=True if device == "cuda" and self.vram_gb < 16 else False - ) - ) - - self._model_cache[model_name] = (model, tokenizer) - - return self._model_cache[model_name] - - def _generate( - self, - model, - tokenizer, - prompt: str, - max_tokens: int, - temperature: float, - top_p: float, - do_sample: bool - ) -> str: - """Generate text without streaming""" - inputs = tokenizer(prompt, return_tensors="pt") - - if self.torch.cuda.is_available(): - inputs = {k: v.cuda() for k, v in inputs.items()} - - with self.torch.no_grad(): - outputs = model.generate( - **inputs, - max_new_tokens=max_tokens, - temperature=temperature, - top_p=top_p, - do_sample=do_sample, - pad_token_id=tokenizer.eos_token_id - ) - - # Decode only the new tokens - new_tokens = outputs[0][inputs["input_ids"].shape[1]:] - response = tokenizer.decode(new_tokens, skip_special_tokens=True) - - return response - - def _generate_streaming( - self, - model, - tokenizer, - prompt: str, - max_tokens: int, - temperature: float, - top_p: float, - do_sample: bool - ): - """Generate text with streaming""" - inputs = tokenizer(prompt, return_tensors="pt") - - if self.torch.cuda.is_available(): - inputs = {k: v.cuda() 
for k, v in inputs.items()} - - # Simple streaming implementation - # In production, you'd use model.generate with streamer - with self.torch.no_grad(): - for i in range(max_tokens): - outputs = model.generate( - **inputs, - max_new_tokens=1, - temperature=temperature, - top_p=top_p, - do_sample=do_sample, - pad_token_id=tokenizer.eos_token_id - ) - - new_token = outputs[0][-1:] - text = tokenizer.decode(new_token, skip_special_tokens=True) - - if text == tokenizer.eos_token: - break - - yield text - - # Update inputs for next iteration - inputs["input_ids"] = self.torch.cat([inputs["input_ids"], new_token], dim=1) - if "attention_mask" in inputs: - inputs["attention_mask"] = self.torch.cat([ - inputs["attention_mask"], - self.torch.ones((1, 1), device=inputs["attention_mask"].device) - ], dim=1) - - async def health_check(self) -> bool: - """Check LLM health""" - try: - # Try to load a small model - await self._load_model("mistral-7b") - return True - except Exception: - return False - - def cleanup(self) -> None: - """Cleanup resources""" - # Move models to CPU and clear cache - for model, _ in self._model_cache.values(): - if hasattr(model, 'to'): - model.to("cpu") - self._model_cache.clear() - - # Clear GPU cache - if self.torch.cuda.is_available(): - self.torch.cuda.empty_cache() diff --git a/apps/miner-node/plugins/registry.py b/apps/miner-node/plugins/registry.py deleted file mode 100644 index ebff6dd4..00000000 --- a/apps/miner-node/plugins/registry.py +++ /dev/null @@ -1,138 +0,0 @@ -""" -Plugin registry for managing service plugins -""" - -from typing import Dict, List, Type, Optional -import importlib -import inspect -import logging -from pathlib import Path - -from .base import ServicePlugin -from .exceptions import PluginError, PluginNotFoundError - -logger = logging.getLogger(__name__) - - -class PluginRegistry: - """Registry for managing service plugins""" - - def __init__(self): - self._plugins: Dict[str, ServicePlugin] = {} - self._plugin_classes: 
Dict[str, Type[ServicePlugin]] = {} - self._loaded = False - - def register(self, plugin_class: Type[ServicePlugin]) -> None: - """Register a plugin class""" - plugin_id = getattr(plugin_class, "service_id", plugin_class.__name__) - self._plugin_classes[plugin_id] = plugin_class - logger.info(f"Registered plugin class: {plugin_id}") - - def load_plugin(self, service_id: str) -> ServicePlugin: - """Load and instantiate a plugin""" - if service_id not in self._plugin_classes: - raise PluginNotFoundError(f"Plugin {service_id} not found") - - if service_id in self._plugins: - return self._plugins[service_id] - - try: - plugin_class = self._plugin_classes[service_id] - plugin = plugin_class() - plugin.setup() - self._plugins[service_id] = plugin - logger.info(f"Loaded plugin: {service_id}") - return plugin - except Exception as e: - logger.error(f"Failed to load plugin {service_id}: {e}") - raise PluginError(f"Failed to load plugin {service_id}: {e}") - - def get_plugin(self, service_id: str) -> Optional[ServicePlugin]: - """Get loaded plugin""" - return self._plugins.get(service_id) - - def unload_plugin(self, service_id: str) -> None: - """Unload a plugin""" - if service_id in self._plugins: - plugin = self._plugins[service_id] - plugin.cleanup() - del self._plugins[service_id] - logger.info(f"Unloaded plugin: {service_id}") - - def list_plugins(self) -> List[str]: - """List all registered plugin IDs""" - return list(self._plugin_classes.keys()) - - def list_loaded_plugins(self) -> List[str]: - """List all loaded plugin IDs""" - return list(self._plugins.keys()) - - async def load_all_from_directory(self, plugin_dir: Path) -> None: - """Load all plugins from a directory""" - if not plugin_dir.exists(): - logger.warning(f"Plugin directory does not exist: {plugin_dir}") - return - - for plugin_file in plugin_dir.glob("*.py"): - if plugin_file.name.startswith("_"): - continue - - module_name = plugin_file.stem - try: - # Import the module - spec = 
importlib.util.spec_from_file_location(module_name, plugin_file) - module = importlib.util.module_from_spec(spec) - spec.loader.exec_module(module) - - # Find plugin classes in the module - for name, obj in inspect.getmembers(module, inspect.isclass): - if (issubclass(obj, ServicePlugin) and - obj != ServicePlugin and - not name.startswith("_")): - self.register(obj) - logger.info(f"Auto-registered plugin from {module_name}: {name}") - - except Exception as e: - logger.error(f"Failed to load plugin from {plugin_file}: {e}") - - async def initialize(self, plugin_dir: Optional[Path] = None) -> None: - """Initialize the plugin registry""" - if self._loaded: - return - - # Load built-in plugins - from . import whisper, stable_diffusion, llm_inference, ffmpeg, blender - - self.register(whisper.WhisperPlugin) - self.register(stable_diffusion.StableDiffusionPlugin) - self.register(llm_inference.LLMPlugin) - self.register(ffmpeg.FFmpegPlugin) - self.register(blender.BlenderPlugin) - - # Load external plugins if directory provided - if plugin_dir: - await self.load_all_from_directory(plugin_dir) - - self._loaded = True - logger.info(f"Plugin registry initialized with {len(self._plugin_classes)} plugins") - - async def health_check_all(self) -> Dict[str, bool]: - """Health check all loaded plugins""" - results = {} - for service_id, plugin in self._plugins.items(): - try: - results[service_id] = await plugin.health_check() - except Exception as e: - logger.error(f"Health check failed for {service_id}: {e}") - results[service_id] = False - return results - - def cleanup_all(self) -> None: - """Cleanup all loaded plugins""" - for service_id in list(self._plugins.keys()): - self.unload_plugin(service_id) - logger.info("All plugins cleaned up") - - -# Global registry instance -registry = PluginRegistry() diff --git a/apps/miner-node/plugins/stable_diffusion.py b/apps/miner-node/plugins/stable_diffusion.py deleted file mode 100644 index 75783f51..00000000 --- 
a/apps/miner-node/plugins/stable_diffusion.py +++ /dev/null @@ -1,281 +0,0 @@ -""" -Stable Diffusion image generation plugin -""" - -import asyncio -import base64 -import io -from typing import Dict, Any, List -import time -import numpy as np - -from .base import GPUPlugin, PluginResult -from .exceptions import PluginExecutionError - - -class StableDiffusionPlugin(GPUPlugin): - """Plugin for Stable Diffusion image generation""" - - def __init__(self): - super().__init__() - self.service_id = "stable_diffusion" - self.name = "Stable Diffusion" - self.version = "1.0.0" - self.description = "Generate images from text prompts using Stable Diffusion" - self.capabilities = ["txt2img", "img2img"] - self._model_cache = {} - - def setup(self) -> None: - """Initialize Stable Diffusion dependencies""" - super().setup() - - # Check for diffusers installation - try: - from diffusers import StableDiffusionPipeline, StableDiffusionImg2ImgPipeline - self.diffusers = StableDiffusionPipeline - self.img2img_pipe = StableDiffusionImg2ImgPipeline - except ImportError: - raise PluginExecutionError("Diffusers not installed. Install with: pip install diffusers transformers accelerate") - - # Check for torch - try: - import torch - self.torch = torch - except ImportError: - raise PluginExecutionError("PyTorch not installed. Install with: pip install torch") - - # Check for PIL - try: - from PIL import Image - self.Image = Image - except ImportError: - raise PluginExecutionError("PIL not installed. 
Install with: pip install Pillow") - - def validate_request(self, request: Dict[str, Any]) -> List[str]: - """Validate Stable Diffusion request parameters""" - errors = [] - - # Check required parameters - if "prompt" not in request: - errors.append("'prompt' is required") - - # Validate model - model = request.get("model", "runwayml/stable-diffusion-v1-5") - valid_models = [ - "runwayml/stable-diffusion-v1-5", - "stabilityai/stable-diffusion-2-1", - "stabilityai/stable-diffusion-xl-base-1.0" - ] - if model not in valid_models: - errors.append(f"Invalid model. Must be one of: {', '.join(valid_models)}") - - # Validate dimensions - width = request.get("width", 512) - height = request.get("height", 512) - - if not isinstance(width, int) or width < 256 or width > 1024: - errors.append("Width must be an integer between 256 and 1024") - if not isinstance(height, int) or height < 256 or height > 1024: - errors.append("Height must be an integer between 256 and 1024") - - # Validate steps - steps = request.get("steps", 20) - if not isinstance(steps, int) or steps < 1 or steps > 100: - errors.append("Steps must be an integer between 1 and 100") - - # Validate guidance scale - guidance_scale = request.get("guidance_scale", 7.5) - if not isinstance(guidance_scale, (int, float)) or guidance_scale < 1.0 or guidance_scale > 20.0: - errors.append("Guidance scale must be between 1.0 and 20.0") - - # Check img2img requirements - if request.get("task") == "img2img": - if "init_image" not in request: - errors.append("'init_image' is required for img2img task") - strength = request.get("strength", 0.8) - if not isinstance(strength, (int, float)) or strength < 0.0 or strength > 1.0: - errors.append("Strength must be between 0.0 and 1.0") - - return errors - - def get_hardware_requirements(self) -> Dict[str, Any]: - """Get hardware requirements for Stable Diffusion""" - return { - "gpu": "required", - "vram_gb": 6, - "ram_gb": 8, - "cuda": "required" - } - - async def execute(self, 
request: Dict[str, Any]) -> PluginResult: - """Execute Stable Diffusion generation""" - start_time = time.time() - - try: - # Validate request - errors = self.validate_request(request) - if errors: - return PluginResult( - success=False, - error=f"Validation failed: {'; '.join(errors)}" - ) - - # Get parameters - prompt = request["prompt"] - negative_prompt = request.get("negative_prompt", "") - model_name = request.get("model", "runwayml/stable-diffusion-v1-5") - width = request.get("width", 512) - height = request.get("height", 512) - steps = request.get("steps", 20) - guidance_scale = request.get("guidance_scale", 7.5) - num_images = request.get("num_images", 1) - seed = request.get("seed") - task = request.get("task", "txt2img") - - # Load model - pipe = await self._load_model(model_name) - - # Generate images - loop = asyncio.get_event_loop() - - if task == "img2img": - # Handle img2img - init_image_data = request["init_image"] - init_image = self._decode_image(init_image_data) - strength = request.get("strength", 0.8) - - images = await loop.run_in_executor( - None, - lambda: pipe( - prompt=prompt, - negative_prompt=negative_prompt, - image=init_image, - strength=strength, - num_inference_steps=steps, - guidance_scale=guidance_scale, - num_images_per_prompt=num_images, - generator=self._get_generator(seed) - ).images - ) - else: - # Handle txt2img - images = await loop.run_in_executor( - None, - lambda: pipe( - prompt=prompt, - negative_prompt=negative_prompt, - width=width, - height=height, - num_inference_steps=steps, - guidance_scale=guidance_scale, - num_images_per_prompt=num_images, - generator=self._get_generator(seed) - ).images - ) - - # Encode images to base64 - encoded_images = [] - for img in images: - buffer = io.BytesIO() - img.save(buffer, format="PNG") - encoded_images.append(base64.b64encode(buffer.getvalue()).decode()) - - execution_time = time.time() - start_time - - return PluginResult( - success=True, - data={ - "images": encoded_images, - 
"count": len(images), - "parameters": { - "prompt": prompt, - "width": width, - "height": height, - "steps": steps, - "guidance_scale": guidance_scale, - "seed": seed - } - }, - metrics={ - "model": model_name, - "task": task, - "images_generated": len(images), - "generation_time": execution_time, - "time_per_image": execution_time / len(images) - }, - execution_time=execution_time - ) - - except Exception as e: - return PluginResult( - success=False, - error=str(e), - execution_time=time.time() - start_time - ) - - async def _load_model(self, model_name: str): - """Load Stable Diffusion model with caching""" - if model_name not in self._model_cache: - loop = asyncio.get_event_loop() - - # Determine device - device = "cuda" if self.torch.cuda.is_available() else "cpu" - - # Load with attention slicing for memory efficiency - pipe = await loop.run_in_executor( - None, - lambda: self.diffusers.from_pretrained( - model_name, - torch_dtype=self.torch.float16 if device == "cuda" else self.torch.float32, - safety_checker=None, - requires_safety_checker=False - ) - ) - - pipe = pipe.to(device) - - # Enable memory optimizations - if device == "cuda": - pipe.enable_attention_slicing() - if self.vram_gb < 8: - pipe.enable_model_cpu_offload() - - self._model_cache[model_name] = pipe - - return self._model_cache[model_name] - - def _decode_image(self, image_data: str) -> 'Image': - """Decode base64 image""" - if image_data.startswith('data:image'): - # Remove data URL prefix - image_data = image_data.split(',')[1] - - image_bytes = base64.b64decode(image_data) - return self.Image.open(io.BytesIO(image_bytes)) - - def _get_generator(self, seed: Optional[int]): - """Get torch generator for reproducible results""" - if seed is not None: - return self.torch.Generator().manual_seed(seed) - return None - - async def health_check(self) -> bool: - """Check Stable Diffusion health""" - try: - # Try to load a small model - pipe = await self._load_model("runwayml/stable-diffusion-v1-5") 
- return pipe is not None - except Exception: - return False - - def cleanup(self) -> None: - """Cleanup resources""" - # Move models to CPU and clear cache - for pipe in self._model_cache.values(): - if hasattr(pipe, 'to'): - pipe.to("cpu") - self._model_cache.clear() - - # Clear GPU cache - if self.torch.cuda.is_available(): - self.torch.cuda.empty_cache() diff --git a/apps/miner-node/plugins/whisper.py b/apps/miner-node/plugins/whisper.py deleted file mode 100644 index 66dd41a4..00000000 --- a/apps/miner-node/plugins/whisper.py +++ /dev/null @@ -1,215 +0,0 @@ -""" -Whisper speech recognition plugin -""" - -import asyncio -import os -import tempfile -from typing import Dict, Any, List -import time - -from .base import GPUPlugin, PluginResult -from .exceptions import PluginExecutionError - - -class WhisperPlugin(GPUPlugin): - """Plugin for Whisper speech recognition""" - - def __init__(self): - super().__init__() - self.service_id = "whisper" - self.name = "Whisper Speech Recognition" - self.version = "1.0.0" - self.description = "Transcribe and translate audio files using OpenAI Whisper" - self.capabilities = ["transcribe", "translate"] - self._model_cache = {} - - def setup(self) -> None: - """Initialize Whisper dependencies""" - super().setup() - - # Check for whisper installation - try: - import whisper - self.whisper = whisper - except ImportError: - raise PluginExecutionError("Whisper not installed. Install with: pip install openai-whisper") - - # Check for ffmpeg - import subprocess - try: - subprocess.run(["ffmpeg", "-version"], capture_output=True, check=True) - except (subprocess.CalledProcessError, FileNotFoundError): - raise PluginExecutionError("FFmpeg not found. 
Install FFmpeg for audio processing") - - def validate_request(self, request: Dict[str, Any]) -> List[str]: - """Validate Whisper request parameters""" - errors = [] - - # Check required parameters - if "audio_url" not in request and "audio_file" not in request: - errors.append("Either 'audio_url' or 'audio_file' must be provided") - - # Validate model - model = request.get("model", "base") - valid_models = ["tiny", "base", "small", "medium", "large", "large-v2", "large-v3"] - if model not in valid_models: - errors.append(f"Invalid model. Must be one of: {', '.join(valid_models)}") - - # Validate task - task = request.get("task", "transcribe") - if task not in ["transcribe", "translate"]: - errors.append("Task must be 'transcribe' or 'translate'") - - # Validate language - if "language" in request: - language = request["language"] - if not isinstance(language, str) or len(language) != 2: - errors.append("Language must be a 2-letter language code (e.g., 'en', 'es')") - - return errors - - def get_hardware_requirements(self) -> Dict[str, Any]: - """Get hardware requirements for Whisper""" - return { - "gpu": "recommended", - "vram_gb": 2, - "ram_gb": 4, - "storage_gb": 1 - } - - async def execute(self, request: Dict[str, Any]) -> PluginResult: - """Execute Whisper transcription""" - start_time = time.time() - - try: - # Validate request - errors = self.validate_request(request) - if errors: - return PluginResult( - success=False, - error=f"Validation failed: {'; '.join(errors)}" - ) - - # Get parameters - model_name = request.get("model", "base") - task = request.get("task", "transcribe") - language = request.get("language") - temperature = request.get("temperature", 0.0) - - # Load or get cached model - model = await self._load_model(model_name) - - # Get audio file - audio_path = await self._get_audio_file(request) - - # Transcribe - loop = asyncio.get_event_loop() - - if task == "translate": - result = await loop.run_in_executor( - None, - lambda: 
model.transcribe( - audio_path, - task="translate", - temperature=temperature - ) - ) - else: - result = await loop.run_in_executor( - None, - lambda: model.transcribe( - audio_path, - language=language, - temperature=temperature - ) - ) - - # Clean up - if audio_path != request.get("audio_file"): - os.unlink(audio_path) - - execution_time = time.time() - start_time - - return PluginResult( - success=True, - data={ - "text": result["text"], - "language": result.get("language"), - "segments": result.get("segments", []) - }, - metrics={ - "model": model_name, - "task": task, - "audio_duration": result.get("duration"), - "processing_time": execution_time, - "real_time_factor": result.get("duration", 0) / execution_time if execution_time > 0 else 0 - }, - execution_time=execution_time - ) - - except Exception as e: - return PluginResult( - success=False, - error=str(e), - execution_time=time.time() - start_time - ) - - async def _load_model(self, model_name: str): - """Load Whisper model with caching""" - if model_name not in self._model_cache: - loop = asyncio.get_event_loop() - model = await loop.run_in_executor( - None, - lambda: self.whisper.load_model(model_name) - ) - self._model_cache[model_name] = model - - return self._model_cache[model_name] - - async def _get_audio_file(self, request: Dict[str, Any]) -> str: - """Get audio file from URL or direct file path""" - if "audio_file" in request: - return request["audio_file"] - - # Download from URL - audio_url = request["audio_url"] - - # Use requests to download - import requests - - response = requests.get(audio_url, stream=True) - response.raise_for_status() - - # Save to temporary file - suffix = self._get_audio_suffix(audio_url) - with tempfile.NamedTemporaryFile(delete=False, suffix=suffix) as f: - for chunk in response.iter_content(chunk_size=8192): - f.write(chunk) - return f.name - - def _get_audio_suffix(self, url: str) -> str: - """Get file extension from URL""" - if url.endswith('.mp3'): - return 
'.mp3' - elif url.endswith('.wav'): - return '.wav' - elif url.endswith('.m4a'): - return '.m4a' - elif url.endswith('.flac'): - return '.flac' - else: - return '.mp3' # Default - - async def health_check(self) -> bool: - """Check Whisper health""" - try: - # Check if we can load the tiny model - await self._load_model("tiny") - return True - except Exception: - return False - - def cleanup(self) -> None: - """Cleanup resources""" - self._model_cache.clear() diff --git a/apps/miner-node/pyproject.toml b/apps/miner-node/pyproject.toml deleted file mode 100644 index 0be65c26..00000000 --- a/apps/miner-node/pyproject.toml +++ /dev/null @@ -1,30 +0,0 @@ -[tool.poetry] -name = "aitbc-miner-node" -version = "0.1.0" -description = "AITBC miner node daemon" -authors = ["AITBC Team"] -packages = [ - { include = "aitbc_miner", from = "src" } -] - -[tool.poetry.dependencies] -python = "^3.11" -httpx = "^0.27.0" -pydantic = "^2.7.0" -pyyaml = "^6.0.1" -psutil = "^5.9.8" -aiosignal = "^1.3.1" -uvloop = { version = "^0.19.0", optional = true } -asyncio = { version = "^3.4.3", optional = true } -rich = "^13.7.1" - -[tool.poetry.extras] -uvloop = ["uvloop"] - -[tool.poetry.group.dev.dependencies] -pytest = "^8.2.0" -pytest-asyncio = "^0.23.0" - -[build-system] -requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" diff --git a/apps/miner-node/src/aitbc_miner/__init__.py b/apps/miner-node/src/aitbc_miner/__init__.py deleted file mode 100644 index dfeb6304..00000000 --- a/apps/miner-node/src/aitbc_miner/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""AITBC miner node package.""" diff --git a/apps/miner-node/src/aitbc_miner/agent/__init__.py b/apps/miner-node/src/aitbc_miner/agent/__init__.py deleted file mode 100644 index 5a420716..00000000 --- a/apps/miner-node/src/aitbc_miner/agent/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Control loop and background tasks for the miner node.""" diff --git a/apps/miner-node/src/aitbc_miner/agent/control.py 
b/apps/miner-node/src/aitbc_miner/agent/control.py deleted file mode 100644 index ba92c8df..00000000 --- a/apps/miner-node/src/aitbc_miner/agent/control.py +++ /dev/null @@ -1,127 +0,0 @@ -from __future__ import annotations - -import asyncio -import json -from collections.abc import Callable -from typing import Optional - -from ..config import settings -from ..logging import get_logger -from ..coordinator import CoordinatorClient -from ..util.probe import collect_capabilities, collect_runtime_metrics -from ..util.backoff import compute_backoff -from ..util.fs import ensure_workspace, write_json -from ..runners import get_runner - -logger = get_logger(__name__) - - -class MinerControlLoop: - def __init__(self) -> None: - self._tasks: list[asyncio.Task[None]] = [] - self._stop_event = asyncio.Event() - self._coordinator = CoordinatorClient() - self._capabilities_snapshot = collect_capabilities(settings.max_concurrent_cpu, settings.max_concurrent_gpu) - self._current_backoff = settings.poll_interval_seconds - - async def start(self) -> None: - logger.info("Starting miner control loop", extra={"node_id": settings.node_id}) - await self._register() - self._tasks.append(asyncio.create_task(self._heartbeat_loop())) - self._tasks.append(asyncio.create_task(self._poll_loop())) - - async def stop(self) -> None: - logger.info("Stopping miner control loop") - self._stop_event.set() - for task in self._tasks: - task.cancel() - await asyncio.gather(*self._tasks, return_exceptions=True) - await self._coordinator.aclose() - - async def _register(self) -> None: - payload = { - "capabilities": self._capabilities_snapshot.capabilities, - "concurrency": self._capabilities_snapshot.concurrency, - "region": settings.region, - } - try: - resp = await self._coordinator.register(payload) - logger.info("Registered miner", extra={"resp": resp}) - except Exception as exc: - logger.exception("Failed to register miner", exc_info=exc) - raise - - async def _heartbeat_loop(self) -> None: - 
interval = settings.heartbeat_interval_seconds - while not self._stop_event.is_set(): - payload = { - "inflight": 0, - "status": "ONLINE", - "metadata": collect_runtime_metrics(), - } - try: - await self._coordinator.heartbeat(payload) - logger.debug("heartbeat sent") - except Exception as exc: - logger.warning("heartbeat failed", exc_info=exc) - await asyncio.sleep(interval) - - async def _poll_loop(self) -> None: - interval = settings.poll_interval_seconds - while not self._stop_event.is_set(): - payload = {"max_wait_seconds": interval} - try: - job = await self._coordinator.poll(payload) - if job: - logger.info("received job", extra={"job_id": job.get("job_id")}) - self._current_backoff = settings.poll_interval_seconds - await self._handle_job(job) - else: - interval = min(compute_backoff(interval, 2.0, settings.heartbeat_jitter_pct, settings.max_backoff_seconds), settings.max_backoff_seconds) - logger.debug("no job; next poll interval=%s", interval) - except Exception as exc: - logger.warning("poll failed", exc_info=exc) - interval = min(compute_backoff(interval, 2.0, settings.heartbeat_jitter_pct, settings.max_backoff_seconds), settings.max_backoff_seconds) - await asyncio.sleep(interval) - - async def _handle_job(self, job: dict) -> None: - job_id = job.get("job_id", "unknown") - workspace = ensure_workspace(settings.workspace_root, job_id) - runner_kind = job.get("runner", {}).get("kind", "noop") - runner = get_runner(runner_kind) - - try: - result = await runner.run(job, workspace) - except Exception as exc: - logger.exception("runner crashed", extra={"job_id": job_id, "runner": runner_kind}) - await self._coordinator.submit_failure( - job_id, - { - "error_code": "RUNTIME_ERROR", - "error_message": str(exc), - "metrics": {}, - }, - ) - return - - if result.ok: - write_json(workspace / "result.json", result.output) - try: - await self._coordinator.submit_result( - job_id, - { - "result": result.output, - "metrics": {"workspace": str(workspace)}, - }, - ) - 
except Exception as exc: - logger.warning("failed to submit result", extra={"job_id": job_id}, exc_info=exc) - else: - await self._coordinator.submit_failure( - job_id, - { - "error_code": result.output.get("error_code", "FAILED"), - "error_message": result.output.get("error_message", "Job failed"), - "metrics": result.output.get("metrics", {}), - }, - ) diff --git a/apps/miner-node/src/aitbc_miner/config.py b/apps/miner-node/src/aitbc_miner/config.py deleted file mode 100644 index ac95d21d..00000000 --- a/apps/miner-node/src/aitbc_miner/config.py +++ /dev/null @@ -1,40 +0,0 @@ -from __future__ import annotations - -from pathlib import Path -from typing import Optional - -from pydantic import BaseModel, Field -from pydantic_settings import BaseSettings, SettingsConfigDict - - -class MinerSettings(BaseSettings): - model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8", case_sensitive=False) - - node_id: str = "node-dev-1" - coordinator_base_url: str = "http://127.0.0.1:8011/v1" - auth_token: str = "REDACTED_MINER_KEY" - region: Optional[str] = None - - workspace_root: Path = Field(default=Path("/var/lib/aitbc/miner/jobs")) - cache_root: Path = Field(default=Path("/var/lib/aitbc/miner/cache")) - - heartbeat_interval_seconds: int = 15 - heartbeat_jitter_pct: int = 10 - heartbeat_timeout_seconds: int = 60 - - poll_interval_seconds: int = 3 - max_backoff_seconds: int = 60 - - max_concurrent_cpu: int = 1 - max_concurrent_gpu: int = 1 - - enable_cli_runner: bool = True - enable_python_runner: bool = True - - allowlist_dir: Path = Field(default=Path("/etc/aitbc/miner/allowlist.d")) - - log_level: str = "INFO" - log_path: Optional[Path] = None - - -settings = MinerSettings() diff --git a/apps/miner-node/src/aitbc_miner/coordinator.py b/apps/miner-node/src/aitbc_miner/coordinator.py deleted file mode 100644 index dcc1f218..00000000 --- a/apps/miner-node/src/aitbc_miner/coordinator.py +++ /dev/null @@ -1,76 +0,0 @@ -from __future__ import annotations - 
-import asyncio -from typing import Any, Dict, Optional - -import httpx - -from .config import MinerSettings, settings -from .logging import get_logger - -logger = get_logger(__name__) - - -class CoordinatorClient: - """Async HTTP client for interacting with the coordinator API.""" - - def __init__(self, cfg: MinerSettings | None = None) -> None: - self.cfg = cfg or settings - self._client: Optional[httpx.AsyncClient] = None - - @property - def client(self) -> httpx.AsyncClient: - if self._client is None: - headers = { - "Authorization": f"Bearer {self.cfg.auth_token}", - "User-Agent": f"aitbc-miner/{self.cfg.node_id}", - } - timeout = httpx.Timeout(connect=5.0, read=30.0, write=10.0, pool=None) - self._client = httpx.AsyncClient(base_url=self.cfg.coordinator_base_url.rstrip("/"), headers=headers, timeout=timeout) - return self._client - - async def aclose(self) -> None: - if self._client: - await self._client.aclose() - self._client = None - - async def register(self, payload: Dict[str, Any]) -> Dict[str, Any]: - logger.debug("registering miner", extra={"payload": payload}) - resp = await self.client.post("/miners/register", json=payload) - resp.raise_for_status() - return resp.json() - - async def heartbeat(self, payload: Dict[str, Any]) -> Dict[str, Any]: - resp = await self.client.post("/miners/heartbeat", json=payload) - resp.raise_for_status() - return resp.json() - - async def poll(self, payload: Dict[str, Any]) -> Optional[Dict[str, Any]]: - resp = await self.client.post("/miners/poll", json=payload) - if resp.status_code == 204: - logger.debug("no job available") - return None - resp.raise_for_status() - return resp.json() - - async def submit_result(self, job_id: str, payload: Dict[str, Any]) -> Dict[str, Any]: - resp = await self.client.post(f"/miners/{job_id}/result", json=payload) - resp.raise_for_status() - return resp.json() - - async def submit_failure(self, job_id: str, payload: Dict[str, Any]) -> Dict[str, Any]: - resp = await 
self.client.post(f"/miners/{job_id}/fail", json=payload) - resp.raise_for_status() - return resp.json() - - async def __aenter__(self) -> "CoordinatorClient": - _ = self.client - return self - - async def __aexit__(self, exc_type, exc, tb) -> None: - await self.aclose() - - -async def backoff(base: float, max_seconds: float) -> float: - await asyncio.sleep(base) - return min(base * 2, max_seconds) diff --git a/apps/miner-node/src/aitbc_miner/logging.py b/apps/miner-node/src/aitbc_miner/logging.py deleted file mode 100644 index 6ef0b99f..00000000 --- a/apps/miner-node/src/aitbc_miner/logging.py +++ /dev/null @@ -1,25 +0,0 @@ -from __future__ import annotations - -import logging -from typing import Optional - -from .config import settings - - -def configure_logging(level: Optional[str] = None, log_path: Optional[str] = None) -> None: - log_level = getattr(logging, (level or settings.log_level).upper(), logging.INFO) - handlers: list[logging.Handler] = [logging.StreamHandler()] - if log_path: - handlers.append(logging.FileHandler(log_path)) - - logging.basicConfig( - level=log_level, - format="%(asctime)s %(levelname)s %(name)s :: %(message)s", - handlers=handlers, - ) - - -def get_logger(name: str) -> logging.Logger: - if not logging.getLogger().handlers: - configure_logging(settings.log_level, settings.log_path.as_posix() if settings.log_path else None) - return logging.getLogger(name) diff --git a/apps/miner-node/src/aitbc_miner/main.py b/apps/miner-node/src/aitbc_miner/main.py deleted file mode 100644 index 2d045756..00000000 --- a/apps/miner-node/src/aitbc_miner/main.py +++ /dev/null @@ -1,51 +0,0 @@ -from __future__ import annotations - -import asyncio -import signal -from contextlib import asynccontextmanager -from typing import AsyncIterator - -from .config import settings -from .logging import get_logger - -logger = get_logger(__name__) - - -class MinerApplication: - def __init__(self) -> None: - self._stop_event = asyncio.Event() - - async def start(self) -> 
None: - logger.info("Miner node starting", extra={"node_id": settings.node_id}) - # TODO: initialize capability probe, register with coordinator, start heartbeat and poll loops - await self._stop_event.wait() - - async def stop(self) -> None: - logger.info("Miner node shutting down") - self._stop_event.set() - - -@asynccontextmanager -async def miner_app() -> AsyncIterator[MinerApplication]: - app = MinerApplication() - try: - yield app - finally: - await app.stop() - - -def run() -> None: - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - - async def _run() -> None: - async with miner_app() as app: - loop.add_signal_handler(signal.SIGINT, lambda: asyncio.create_task(app.stop())) - loop.add_signal_handler(signal.SIGTERM, lambda: asyncio.create_task(app.stop())) - await app.start() - - loop.run_until_complete(_run()) - - -if __name__ == "__main__": # pragma: no cover - run() diff --git a/apps/miner-node/src/aitbc_miner/runners/__init__.py b/apps/miner-node/src/aitbc_miner/runners/__init__.py deleted file mode 100644 index 4e07c853..00000000 --- a/apps/miner-node/src/aitbc_miner/runners/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -from __future__ import annotations - -from typing import Dict - -from .base import BaseRunner -from .cli.simple import CLIRunner -from .python.noop import PythonNoopRunner -from .service import ServiceRunner - - -_RUNNERS: Dict[str, BaseRunner] = { - "cli": CLIRunner(), - "python": PythonNoopRunner(), - "noop": PythonNoopRunner(), - "service": ServiceRunner(), -} - - -def get_runner(kind: str) -> BaseRunner: - return _RUNNERS.get(kind, _RUNNERS["noop"]) diff --git a/apps/miner-node/src/aitbc_miner/runners/base.py b/apps/miner-node/src/aitbc_miner/runners/base.py deleted file mode 100644 index 7fa3ecf9..00000000 --- a/apps/miner-node/src/aitbc_miner/runners/base.py +++ /dev/null @@ -1,17 +0,0 @@ -from __future__ import annotations - -from dataclasses import dataclass -from pathlib import Path -from typing import Any, Dict - - 
-@dataclass -class RunnerResult: - ok: bool - output: Dict[str, Any] - artifacts: Dict[str, Path] | None = None - - -class BaseRunner: - async def run(self, job: Dict[str, Any], workspace: Path) -> RunnerResult: - raise NotImplementedError diff --git a/apps/miner-node/src/aitbc_miner/runners/cli/simple.py b/apps/miner-node/src/aitbc_miner/runners/cli/simple.py deleted file mode 100644 index b2eab84f..00000000 --- a/apps/miner-node/src/aitbc_miner/runners/cli/simple.py +++ /dev/null @@ -1,62 +0,0 @@ -from __future__ import annotations - -import asyncio -from pathlib import Path -from typing import Any, Dict, List - -from ..base import BaseRunner, RunnerResult - - -class CLIRunner(BaseRunner): - async def run(self, job: Dict[str, Any], workspace: Path) -> RunnerResult: - runner_cfg = job.get("runner", {}) - command: List[str] = runner_cfg.get("command", []) - if not command: - return RunnerResult( - ok=False, - output={ - "error_code": "INVALID_COMMAND", - "error_message": "runner.command is required for CLI jobs", - "metrics": {}, - }, - ) - - stdout_path = workspace / "stdout.log" - stderr_path = workspace / "stderr.log" - - process = await asyncio.create_subprocess_exec( - *command, - cwd=str(workspace), - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.PIPE, - ) - - stdout_bytes, stderr_bytes = await process.communicate() - stdout_path.write_bytes(stdout_bytes) - stderr_path.write_bytes(stderr_bytes) - - if process.returncode == 0: - return RunnerResult( - ok=True, - output={ - "exit_code": 0, - "stdout": stdout_path.name, - "stderr": stderr_path.name, - }, - artifacts={ - "stdout": stdout_path, - "stderr": stderr_path, - }, - ) - - return RunnerResult( - ok=False, - output={ - "error_code": "PROCESS_FAILED", - "error_message": f"command exited with code {process.returncode}", - "metrics": { - "exit_code": process.returncode, - "stderr": stderr_path.name, - }, - }, - ) diff --git a/apps/miner-node/src/aitbc_miner/runners/python/noop.py 
b/apps/miner-node/src/aitbc_miner/runners/python/noop.py deleted file mode 100644 index b8aaa331..00000000 --- a/apps/miner-node/src/aitbc_miner/runners/python/noop.py +++ /dev/null @@ -1,20 +0,0 @@ -from __future__ import annotations - -import asyncio -from pathlib import Path -from typing import Any, Dict - -from ..base import BaseRunner, RunnerResult - - -class PythonNoopRunner(BaseRunner): - async def run(self, job: Dict[str, Any], workspace: Path) -> RunnerResult: - await asyncio.sleep(0) - payload = job.get("payload", {}) - return RunnerResult( - ok=True, - output={ - "echo": payload, - "message": "python noop runner executed", - }, - ) diff --git a/apps/miner-node/src/aitbc_miner/runners/service.py b/apps/miner-node/src/aitbc_miner/runners/service.py deleted file mode 100644 index 2576b70b..00000000 --- a/apps/miner-node/src/aitbc_miner/runners/service.py +++ /dev/null @@ -1,118 +0,0 @@ -""" -Service runner for executing GPU service jobs via plugins -""" - -import asyncio -import json -import sys -from pathlib import Path -from typing import Dict, Any, Optional - -from .base import BaseRunner -from ...config import settings -from ...logging import get_logger - -# Add plugins directory to path -plugins_path = Path(__file__).parent.parent.parent.parent / "plugins" -sys.path.insert(0, str(plugins_path)) - -try: - from plugins.discovery import ServiceDiscovery -except ImportError: - ServiceDiscovery = None - -logger = get_logger(__name__) - - -class ServiceRunner(BaseRunner): - """Runner for GPU service jobs using the plugin system""" - - def __init__(self): - super().__init__() - self.discovery: Optional[ServiceDiscovery] = None - self._initialized = False - - async def initialize(self) -> None: - """Initialize the service discovery system""" - if self._initialized: - return - - if ServiceDiscovery is None: - raise ImportError("ServiceDiscovery not available. 
Check plugin installation.") - - # Create service discovery - pool_hub_url = getattr(settings, 'pool_hub_url', 'http://localhost:8001') - miner_id = getattr(settings, 'node_id', 'miner-1') - - self.discovery = ServiceDiscovery(pool_hub_url, miner_id) - await self.discovery.start() - self._initialized = True - - logger.info("Service runner initialized") - - async def run(self, job: Dict[str, Any], workspace: Path) -> Dict[str, Any]: - """Execute a service job""" - await self.initialize() - - job_id = job.get("job_id", "unknown") - - try: - # Extract service type and parameters - service_type = job.get("service_type") - if not service_type: - raise ValueError("Job missing service_type") - - # Get service parameters from job - service_params = job.get("parameters", {}) - - logger.info(f"Executing service job", extra={ - "job_id": job_id, - "service_type": service_type - }) - - # Execute via plugin system - result = await self.discovery.execute_service(service_type, service_params) - - # Save result to workspace - result_file = workspace / "result.json" - with open(result_file, "w") as f: - json.dump(result, f, indent=2) - - if result["success"]: - logger.info(f"Service job completed successfully", extra={ - "job_id": job_id, - "execution_time": result.get("execution_time") - }) - - # Return success result - return { - "status": "completed", - "result": result["data"], - "metrics": result.get("metrics", {}), - "execution_time": result.get("execution_time") - } - else: - logger.error(f"Service job failed", extra={ - "job_id": job_id, - "error": result.get("error") - }) - - # Return failure result - return { - "status": "failed", - "error": result.get("error", "Unknown error"), - "execution_time": result.get("execution_time") - } - - except Exception as e: - logger.exception("Service runner failed", extra={"job_id": job_id}) - return { - "status": "failed", - "error": str(e) - } - - async def cleanup(self) -> None: - """Cleanup resources""" - if self.discovery: - await 
self.discovery.stop() - self._initialized = False diff --git a/apps/miner-node/src/aitbc_miner/util/backoff.py b/apps/miner-node/src/aitbc_miner/util/backoff.py deleted file mode 100644 index 96fc56c3..00000000 --- a/apps/miner-node/src/aitbc_miner/util/backoff.py +++ /dev/null @@ -1,19 +0,0 @@ -from __future__ import annotations - -import asyncio -import random - - -def compute_backoff(base: float, factor: float, jitter_pct: float, max_seconds: float) -> float: - backoff = min(base * factor, max_seconds) - jitter = backoff * (jitter_pct / 100.0) - return max(0.0, random.uniform(backoff - jitter, backoff + jitter)) - - -def next_backoff(current: float, factor: float, jitter_pct: float, max_seconds: float) -> float: - return compute_backoff(current, factor, jitter_pct, max_seconds) - - -async def sleep_with_backoff(delay: float, factor: float, jitter_pct: float, max_seconds: float) -> float: - await asyncio.sleep(delay) - return next_backoff(delay, factor, jitter_pct, max_seconds) diff --git a/apps/miner-node/src/aitbc_miner/util/fs.py b/apps/miner-node/src/aitbc_miner/util/fs.py deleted file mode 100644 index 07e5d630..00000000 --- a/apps/miner-node/src/aitbc_miner/util/fs.py +++ /dev/null @@ -1,15 +0,0 @@ -from __future__ import annotations - -from pathlib import Path - - -def ensure_workspace(root: Path, job_id: str) -> Path: - path = root / job_id - path.mkdir(parents=True, exist_ok=True) - return path - - -def write_json(path: Path, data: dict) -> None: - import json - - path.write_text(json.dumps(data, indent=2), encoding="utf-8") diff --git a/apps/miner-node/src/aitbc_miner/util/probe.py b/apps/miner-node/src/aitbc_miner/util/probe.py deleted file mode 100644 index f44077bd..00000000 --- a/apps/miner-node/src/aitbc_miner/util/probe.py +++ /dev/null @@ -1,91 +0,0 @@ -from __future__ import annotations - -import platform -import shutil -import subprocess -import time -from dataclasses import dataclass -from typing import Any, Dict, List - -import psutil - - 
-@dataclass -class CapabilitySnapshot: - capabilities: Dict[str, Any] - concurrency: int - region: str | None = None - - -def collect_capabilities(max_cpu_concurrency: int, max_gpu_concurrency: int) -> CapabilitySnapshot: - cpu_count = psutil.cpu_count(logical=True) or 1 - total_mem = psutil.virtual_memory().total - gpu_info = _detect_gpus() - - capabilities: Dict[str, Any] = { - "node": platform.node(), - "python_version": platform.python_version(), - "platform": platform.platform(), - "cpu": { - "logical_cores": cpu_count, - "model": platform.processor(), - }, - "memory": { - "total_bytes": total_mem, - "total_gb": round(total_mem / (1024**3), 2), - }, - "runners": { - "cli": True, - "python": True, - }, - } - - if gpu_info: - capabilities["gpus"] = gpu_info - - concurrency = max(1, max_cpu_concurrency, max_gpu_concurrency if gpu_info else 0) - return CapabilitySnapshot(capabilities=capabilities, concurrency=concurrency) - - -def collect_runtime_metrics() -> Dict[str, Any]: - vm = psutil.virtual_memory() - load_avg = psutil.getloadavg() if hasattr(psutil, "getloadavg") else (0, 0, 0) - return { - "cpu_percent": psutil.cpu_percent(interval=None), - "load_avg": load_avg, - "memory_percent": vm.percent, - "timestamp": time.time(), - } - - -def _detect_gpus() -> List[Dict[str, Any]]: - nvidia_smi = shutil.which("nvidia-smi") - if not nvidia_smi: - return [] - try: - output = subprocess.check_output( - [ - nvidia_smi, - "--query-gpu=name,memory.total", - "--format=csv,noheader" - ], - stderr=subprocess.DEVNULL, - text=True, - ) - except (subprocess.CalledProcessError, FileNotFoundError): - return [] - - gpus: List[Dict[str, Any]] = [] - for line in output.strip().splitlines(): - parts = [p.strip() for p in line.split(",")] - if not parts: - continue - name = parts[0] - mem_mb = None - if len(parts) > 1 and parts[1].lower().endswith(" mib"): - try: - mem_mb = int(float(parts[1].split()[0])) - except ValueError: - mem_mb = None - gpus.append({"name": name, "memory_mb": 
mem_mb}) - return gpus diff --git a/apps/miner-node/tests/test_runners.py b/apps/miner-node/tests/test_runners.py deleted file mode 100644 index f013495b..00000000 --- a/apps/miner-node/tests/test_runners.py +++ /dev/null @@ -1,37 +0,0 @@ -import asyncio -from pathlib import Path - -import pytest - -from aitbc_miner.runners.cli.simple import CLIRunner -from aitbc_miner.runners.python.noop import PythonNoopRunner - - -@pytest.mark.asyncio -async def test_python_noop_runner(tmp_path: Path): - runner = PythonNoopRunner() - job = {"payload": {"value": 42}} - result = await runner.run(job, tmp_path) - assert result.ok - assert result.output["echo"] == job["payload"] - - -@pytest.mark.asyncio -async def test_cli_runner_success(tmp_path: Path): - runner = CLIRunner() - job = {"runner": {"command": ["echo", "hello"]}} - result = await runner.run(job, tmp_path) - assert result.ok - assert result.artifacts is not None - stdout_path = result.artifacts["stdout"] - assert stdout_path.exists() - assert stdout_path.read_text().strip() == "hello" - - -@pytest.mark.asyncio -async def test_cli_runner_invalid_command(tmp_path: Path): - runner = CLIRunner() - job = {"runner": {}} - result = await runner.run(job, tmp_path) - assert not result.ok - assert result.output["error_code"] == "INVALID_COMMAND" diff --git a/apps/wallet-cli/aitbc-wallet b/apps/wallet-cli/aitbc-wallet deleted file mode 100755 index 6f3fd8cc..00000000 --- a/apps/wallet-cli/aitbc-wallet +++ /dev/null @@ -1,245 +0,0 @@ -#!/usr/bin/env python3 -""" -AITBC Wallet CLI - A command-line wallet for AITBC blockchain -""" - -import argparse -import json -import sys -import os -from pathlib import Path -import httpx -from datetime import datetime - -# Configuration -BLOCKCHAIN_RPC = "http://127.0.0.1:9080" -WALLET_DIR = Path.home() / ".aitbc" / "wallets" - -def print_header(): - """Print wallet CLI header""" - print("=" * 50) - print(" AITBC Blockchain Wallet CLI") - print("=" * 50) - -def check_blockchain_connection(): - 
"""Check if connected to blockchain""" - # First check if node is running by checking metrics - try: - response = httpx.get(f"{BLOCKCHAIN_RPC}/metrics", timeout=5.0) - if response.status_code == 200: - # Node is running, now try RPC - try: - rpc_response = httpx.get(f"{BLOCKCHAIN_RPC}/rpc/head", timeout=5.0) - if rpc_response.status_code == 200: - data = rpc_response.json() - return True, data.get("height", "unknown"), data.get("hash", "unknown")[:16] + "..." - else: - return False, f"RPC endpoint error (HTTP {rpc_response.status_code})", "node_running" - except Exception as e: - return False, f"RPC error: {str(e)}", "node_running" - return False, f"Node not responding (HTTP {response.status_code})", None - except Exception as e: - return False, str(e), None - -def get_balance(address): - """Get balance for an address""" - try: - response = httpx.get(f"{BLOCKCHAIN_RPC}/rpc/getBalance/{address}", timeout=5.0) - if response.status_code == 200: - return response.json() - return {"error": f"HTTP {response.status_code}"} - except Exception as e: - return {"error": str(e)} - -def list_wallets(): - """List local wallets""" - WALLET_DIR.mkdir(parents=True, exist_ok=True) - - wallets = [] - for wallet_file in WALLET_DIR.glob("*.json"): - try: - with open(wallet_file, 'r') as f: - data = json.load(f) - wallets.append({ - "id": wallet_file.stem, - "address": data.get("address", "unknown"), - "public_key": data.get("public_key", "unknown"), - "created": data.get("created_at", "unknown") - }) - except Exception as e: - continue - return wallets - -def create_wallet(wallet_id, address=None): - """Create a new wallet file""" - WALLET_DIR.mkdir(parents=True, exist_ok=True) - - wallet_file = WALLET_DIR / f"{wallet_id}.json" - if wallet_file.exists(): - return False, "Wallet already exists" - - # Generate a mock address if not provided - if not address: - address = f"aitbc1{wallet_id}{'x' * (40 - len(wallet_id))}" - - # Generate a mock public key - public_key = 
f"0x{'1234567890abcdef' * 4}" - - wallet_data = { - "wallet_id": wallet_id, - "address": address, - "public_key": public_key, - "created_at": datetime.now().isoformat() + "Z", - "note": "This is a demo wallet file - not for production use" - } - - try: - with open(wallet_file, 'w') as f: - json.dump(wallet_data, f, indent=2) - return True, f"Wallet created: {wallet_file}" - except Exception as e: - return False, str(e) - -def get_block_info(height=None): - try: - if height: - url = f"{BLOCKCHAIN_RPC}/rpc/blocks/{height}" - else: - url = f"{BLOCKCHAIN_RPC}/rpc/head" - - response = httpx.get(url, timeout=5.0) - if response.status_code == 200: - return response.json() - return {"error": f"HTTP {response.status_code}"} - except Exception as e: - return {"error": str(e)} - -def main(): - parser = argparse.ArgumentParser( - description="AITBC Blockchain Wallet CLI", - formatter_class=argparse.RawDescriptionHelpFormatter, - epilog=""" -Examples: - %(prog)s status Check blockchain connection - %(prog)s list List all local wallets - %(prog)s balance
Get balance of an address - %(prog)s block Show latest block info - %(prog)s block Show specific block info - """ - ) - - subparsers = parser.add_subparsers(dest="command", help="Available commands") - - # Status command - status_parser = subparsers.add_parser("status", help="Check blockchain connection status") - - # List command - list_parser = subparsers.add_parser("list", help="List all local wallets") - - # Balance command - balance_parser = subparsers.add_parser("balance", help="Get balance for an address") - balance_parser.add_argument("address", help="Wallet address to check") - - # Block command - block_parser = subparsers.add_parser("block", help="Get block information") - block_parser.add_argument("height", nargs="?", type=int, help="Block height (optional)") - - # Create command - create_parser = subparsers.add_parser("create", help="Create a new wallet file") - create_parser.add_argument("wallet_id", help="Wallet identifier") - create_parser.add_argument("--address", help="Wallet address") - - args = parser.parse_args() - - if not args.command: - print_header() - parser.print_help() - return - - if args.command == "status": - print_header() - print("Checking blockchain connection...\n") - - connected, info, block_hash = check_blockchain_connection() - if connected: - print(f"✅ Status: CONNECTED") - print(f"📦 Node: {BLOCKCHAIN_RPC}") - print(f"🔗 Latest Block: #{info}") - print(f"🧮 Block Hash: {block_hash}") - print(f"⏰ Checked at: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}") - elif block_hash == "node_running": - print(f"⚠️ Status: NODE RUNNING - RPC UNAVAILABLE") - print(f"📦 Node: {BLOCKCHAIN_RPC}") - print(f"❌ RPC Error: {info}") - print(f"💡 The blockchain node is running but RPC endpoints are not working") - print(f" This might be due to initialization or database issues") - print(f"⏰ Checked at: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}") - else: - print(f"❌ Status: DISCONNECTED") - print(f"📦 Node: {BLOCKCHAIN_RPC}") - print(f"⚠️ Error: 
{info}") - print(f"💡 Make sure the blockchain node is running on port 9080") - - elif args.command == "list": - print_header() - wallets = list_wallets() - - if wallets: - print(f"Found {len(wallets)} wallet(s) in {WALLET_DIR}:\n") - for w in wallets: - print(f"🔐 Wallet ID: {w['id']}") - print(f" Address: {w['address']}") - print(f" Public Key: {w['public_key'][:20]}...") - print(f" Created: {w['created']}") - print() - else: - print(f"No wallets found in {WALLET_DIR}") - print("\n💡 To create a wallet, use the wallet-daemon service") - - elif args.command == "balance": - print_header() - print(f"Checking balance for address: {args.address}\n") - - result = get_balance(args.address) - if "error" in result: - print(f"❌ Error: {result['error']}") - else: - balance = result.get("balance", 0) - print(f"💰 Balance: {balance} AITBC") - print(f"📍 Address: {args.address}") - - elif args.command == "block": - print_header() - if args.height: - print(f"Getting block #{args.height}...\n") - else: - print("Getting latest block...\n") - - result = get_block_info(args.height) - if "error" in result: - print(f"❌ Error: {result['error']}") - else: - print(f"📦 Block Height: {result.get('height', 'unknown')}") - print(f"🧮 Block Hash: {result.get('hash', 'unknown')}") - print(f"⏰ Timestamp: {result.get('timestamp', 'unknown')}") - print(f"👤 Proposer: {result.get('proposer', 'unknown')}") - print(f"📊 Transactions: {len(result.get('transactions', []))}") - - elif args.command == "create": - print_header() - success, message = create_wallet(args.wallet_id, args.address) - if success: - print(f"✅ {message}") - print(f"\nWallet Details:") - print(f" ID: {args.wallet_id}") - print(f" Address: {args.address or f'aitbc1{args.wallet_id}...'}") - print(f"\n💡 This is a demo wallet file for testing purposes") - print(f" Use 'aitbc-wallet list' to see all wallets") - else: - print(f"❌ Error: {message}") - - else: - parser.print_help() - -if __name__ == "__main__": - main() diff --git 
a/apps/wallet-cli/aitbc-wallet.1 b/apps/wallet-cli/aitbc-wallet.1 deleted file mode 100644 index 628b5426..00000000 --- a/apps/wallet-cli/aitbc-wallet.1 +++ /dev/null @@ -1,102 +0,0 @@ -.TH AITBC-WALLET "1" "December 2025" "AITBC Wallet CLI" "User Commands" -.SH NAME -aitbc-wallet \- AITBC Blockchain Wallet Command Line Interface -.SH SYNOPSIS -.B aitbc-wallet -[\fIOPTIONS\fR] \fICOMMAND\fR [\fIARGS\fR] -.SH DESCRIPTION -The AITBC Wallet CLI is a command-line tool for interacting with the AITBC blockchain. It allows you to manage wallets, check balances, and monitor blockchain status without exposing your wallet to web interfaces. -.SH COMMANDS -.TP -\fBstatus\fR -Check if the wallet is connected to the AITBC blockchain node. -.TP -\fBlist\fR -List all local wallets stored in ~/.aitbc/wallets/. -.TP -\fBbalance\fR \fIADDRESS\fR -Get the AITBC token balance for the specified address. -.TP -\fBblock\fR [\fIHEIGHT\fR] -Show information about the latest block or a specific block height. -.SH EXAMPLES -Check blockchain connection status: -.P -.RS 4 -.nf -$ aitbc-wallet status -================================================== - AITBC Blockchain Wallet CLI -================================================== -Checking blockchain connection... - -✅ Status: CONNECTED -📦 Node: http://127.0.0.1:9080 -🔗 Latest Block: #42 -🧮 Block Hash: 0x1234...abcd -⏰ Checked at: 2025-12-28 10:30:00 -.fi -.RE -.P -List all wallets: -.P -.RS 4 -.nf -$ aitbc-wallet list -================================================== - AITBC Blockchain Wallet CLI -================================================== -Found 1 wallet(s) in /home/user/.aitbc/wallets: - -🔐 Wallet ID: demo-wallet - Address: aitbc1x7f8x9k2m3n4p5q6r7s8t9u0v1w2x3y4z5a6b7c - Public Key: 0x3aaa0a91f69d886a90... 
- Created: 2025-12-28T10:30:00Z -.fi -.RE -.P -Check wallet balance: -.P -.RS 4 -.nf -$ aitbc-wallet balance aitbc1x7f8x9k2m3n4p5q6r7s8t9u0v1w2x3y4z5a6b7c -================================================== - AITBC Blockchain Wallet CLI -================================================== -Checking balance for address: aitbc1x7f8x9k2m3n4p5q6r7s8t9u0v1w2x3y4z5a6b7c - -💰 Balance: 1000 AITBC -📍 Address: aitbc1x7f8x9k2m3n4p5q6r7s8t9u0v1w2x3y4z5a6b7c -.fi -.RE -.SH FILES -.TP -.I ~/.aitbc/wallets/ -Directory where local wallet files are stored. -.TP -.I /usr/local/bin/aitbc-wallet -The wallet CLI executable. -.SH ENVIRONMENT -.TP -.I BLOCKCHAIN_RPC -The blockchain node RPC URL (default: http://127.0.0.1:9080). -.SH SECURITY -.P -The wallet CLI is designed with security in mind: -.RS 4 -.IP \(bu 4 -No web interface - purely command-line based -.IP \(bu 4 -Wallets stored locally in encrypted format -.IP \(bu 4 -Only connects to localhost blockchain node by default -.IP \(bu 4 -No exposure of private keys to network services -.RE -.SH BUGS -Report bugs to the AITBC project issue tracker. 
-.SH SEE ALSO -.BR aitbc-blockchain (8), -.BR aitbc-coordinator (8) -.SH AUTHOR -AITBC Development Team diff --git a/apps/wallet-cli/aitbc_wallet.py b/apps/wallet-cli/aitbc_wallet.py deleted file mode 100755 index f3d450d1..00000000 --- a/apps/wallet-cli/aitbc_wallet.py +++ /dev/null @@ -1,256 +0,0 @@ -#!/usr/bin/env python3 -""" -AITBC Wallet CLI - Command Line Interface for AITBC Blockchain Wallet -""" - -import argparse -import sys -import json -import os -from pathlib import Path -from typing import Optional -import httpx - -# Add parent directory to path for imports -sys.path.insert(0, str(Path(__file__).parent.parent.parent / "wallet-daemon" / "src")) - -from app.keystore.service import KeystoreService -from app.ledger_mock import SQLiteLedgerAdapter -from app.settings import Settings - - -class AITBCWallet: - """AITBC Blockchain Wallet CLI""" - - def __init__(self, wallet_dir: str = None): - self.wallet_dir = Path(wallet_dir or os.path.expanduser("~/.aitbc/wallets")) - self.wallet_dir.mkdir(parents=True, exist_ok=True) - self.keystore = KeystoreService() - self.blockchain_rpc = "http://127.0.0.1:9080" # Default blockchain node RPC - - def _get_wallet_path(self, wallet_id: str) -> Path: - """Get the path to a wallet file""" - return self.wallet_dir / f"{wallet_id}.wallet" - - def create_wallet(self, wallet_id: str, password: str) -> dict: - """Create a new wallet""" - wallet_path = self._get_wallet_path(wallet_id) - - if wallet_path.exists(): - return {"error": "Wallet already exists"} - - # Generate keypair - keypair = self.keystore.generate_keypair() - - # Store encrypted wallet - wallet_data = { - "wallet_id": wallet_id, - "public_key": keypair["public_key"], - "encrypted_private_key": keypair["encrypted_private_key"], - "salt": keypair["salt"] - } - - # Encrypt and save - self.keystore.save_wallet(wallet_path, wallet_data, password) - - return { - "wallet_id": wallet_id, - "public_key": keypair["public_key"], - "status": "created" - } - - def 
list_wallets(self) -> list: - """List all wallet addresses""" - wallets = [] - for wallet_file in self.wallet_dir.glob("*.wallet"): - try: - wallet_id = wallet_file.stem - # Try to read public key without decrypting - with open(wallet_file, 'rb') as f: - # This is simplified - in real implementation, we'd read metadata - wallets.append({ - "wallet_id": wallet_id, - "address": f"0x{wallet_id[:8]}...", # Simplified address format - "path": str(wallet_file) - }) - except Exception: - continue - return wallets - - def get_balance(self, wallet_id: str, password: str) -> dict: - """Get wallet balance from blockchain""" - # First unlock wallet to get public key - wallet_path = self._get_wallet_path(wallet_id) - - if not wallet_path.exists(): - return {"error": "Wallet not found"} - - try: - wallet_data = self.keystore.load_wallet(wallet_path, password) - public_key = wallet_data["public_key"] - - # Query blockchain for balance - try: - with httpx.Client() as client: - response = client.get( - f"{self.blockchain_rpc}/v1/balances/{public_key}", - timeout=5.0 - ) - if response.status_code == 200: - return response.json() - else: - return {"error": "Failed to query blockchain", "status": response.status_code} - except Exception as e: - return {"error": f"Cannot connect to blockchain: {str(e)}"} - - except Exception as e: - return {"error": f"Failed to unlock wallet: {str(e)}"} - - def check_connection(self) -> dict: - """Check if connected to blockchain""" - try: - with httpx.Client() as client: - # Try to get the latest block - response = client.get(f"{self.blockchain_rpc}/v1/blocks/head", timeout=5.0) - if response.status_code == 200: - block = response.json() - return { - "connected": True, - "blockchain_url": self.blockchain_rpc, - "latest_block": block.get("height", "unknown"), - "status": "connected" - } - else: - return { - "connected": False, - "error": f"HTTP {response.status_code}", - "status": "disconnected" - } - except Exception as e: - return { - "connected": 
False, - "error": str(e), - "status": "disconnected" - } - - def send_transaction(self, wallet_id: str, password: str, to_address: str, amount: float) -> dict: - """Send transaction""" - wallet_path = self._get_wallet_path(wallet_id) - - if not wallet_path.exists(): - return {"error": "Wallet not found"} - - try: - # Unlock wallet - wallet_data = self.keystore.load_wallet(wallet_path, password) - private_key = wallet_data["private_key"] - - # Create transaction - transaction = { - "from": wallet_data["public_key"], - "to": to_address, - "amount": amount, - "nonce": 0 # Would get from blockchain - } - - # Sign transaction - signature = self.keystore.sign_transaction(private_key, transaction) - transaction["signature"] = signature - - # Send to blockchain - with httpx.Client() as client: - response = client.post( - f"{self.blockchain_rpc}/v1/transactions", - json=transaction, - timeout=5.0 - ) - if response.status_code == 200: - return response.json() - else: - return {"error": f"Failed to send transaction: {response.text}"} - - except Exception as e: - return {"error": str(e)} - - -def main(): - """Main CLI entry point""" - parser = argparse.ArgumentParser(description="AITBC Blockchain Wallet CLI") - parser.add_argument("--wallet-dir", default=None, help="Wallet directory path") - - subparsers = parser.add_subparsers(dest="command", help="Available commands") - - # Create wallet - create_parser = subparsers.add_parser("create", help="Create a new wallet") - create_parser.add_argument("wallet_id", help="Wallet identifier") - create_parser.add_argument("password", help="Wallet password") - - # List wallets - subparsers.add_parser("list", help="List all wallets") - - # Get balance - balance_parser = subparsers.add_parser("balance", help="Get wallet balance") - balance_parser.add_argument("wallet_id", help="Wallet identifier") - balance_parser.add_argument("password", help="Wallet password") - - # Check connection - subparsers.add_parser("status", help="Check blockchain 
connection status") - - # Send transaction - send_parser = subparsers.add_parser("send", help="Send transaction") - send_parser.add_argument("wallet_id", help="Wallet identifier") - send_parser.add_argument("password", help="Wallet password") - send_parser.add_argument("to_address", help="Recipient address") - send_parser.add_argument("amount", type=float, help="Amount to send") - - args = parser.parse_args() - - if not args.command: - parser.print_help() - return - - wallet = AITBCWallet(args.wallet_dir) - - if args.command == "create": - result = wallet.create_wallet(args.wallet_id, args.password) - if "error" in result: - print(f"Error: {result['error']}", file=sys.stderr) - else: - print(f"Wallet created successfully!") - print(f"Wallet ID: {result['wallet_id']}") - print(f"Public Key: {result['public_key']}") - - elif args.command == "list": - wallets = wallet.list_wallets() - if wallets: - print("Available wallets:") - for w in wallets: - print(f" - {w['wallet_id']}: {w['address']}") - else: - print("No wallets found") - - elif args.command == "balance": - result = wallet.get_balance(args.wallet_id, args.password) - if "error" in result: - print(f"Error: {result['error']}", file=sys.stderr) - else: - print(f"Balance: {result.get('balance', 'unknown')}") - - elif args.command == "status": - result = wallet.check_connection() - if result["connected"]: - print(f"✓ Connected to blockchain at {result['blockchain_url']}") - print(f" Latest block: {result['latest_block']}") - else: - print(f"✗ Not connected: {result['error']}") - - elif args.command == "send": - result = wallet.send_transaction(args.wallet_id, args.password, args.to_address, args.amount) - if "error" in result: - print(f"Error: {result['error']}", file=sys.stderr) - else: - print(f"Transaction sent: {result.get('tx_hash', 'unknown')}") - - -if __name__ == "__main__": - main() diff --git a/apps/wallet-cli/wallet.py b/apps/wallet-cli/wallet.py deleted file mode 100755 index b78addea..00000000 --- 
a/apps/wallet-cli/wallet.py +++ /dev/null @@ -1,101 +0,0 @@ -#!/usr/bin/env python3 -""" -Simple AITBC Wallet CLI -""" - -import argparse -import json -import sys -import os -from pathlib import Path -import httpx -import getpass - -def check_blockchain_connection(): - """Check if connected to blockchain""" - try: - response = httpx.get("http://127.0.0.1:9080/rpc/head", timeout=5.0) - if response.status_code == 200: - data = response.json() - return True, data.get("height", "unknown") - return False, f"HTTP {response.status_code}" - except Exception as e: - return False, str(e) - -def get_balance(address): - """Get balance for an address""" - try: - response = httpx.get(f"http://127.0.0.1:9080/rpc/getBalance/{address}", timeout=5.0) - if response.status_code == 200: - return response.json() - return {"error": f"HTTP {response.status_code}"} - except Exception as e: - return {"error": str(e)} - -def list_wallets(): - """List local wallets""" - wallet_dir = Path.home() / ".aitbc" / "wallets" - wallet_dir.mkdir(parents=True, exist_ok=True) - - wallets = [] - for wallet_file in wallet_dir.glob("*.json"): - try: - with open(wallet_file, 'r') as f: - data = json.load(f) - wallets.append({ - "id": wallet_file.stem, - "address": data.get("address", "unknown"), - "public_key": data.get("public_key", "unknown")[:20] + "..." 
- }) - except: - continue - return wallets - -def main(): - parser = argparse.ArgumentParser(description="AITBC Wallet CLI") - subparsers = parser.add_subparsers(dest="command", help="Commands") - - # Status command - subparsers.add_parser("status", help="Check blockchain connection") - - # List command - subparsers.add_parser("list", help="List wallets") - - # Balance command - balance_parser = subparsers.add_parser("balance", help="Get balance") - balance_parser.add_argument("address", help="Wallet address") - - args = parser.parse_args() - - if args.command == "status": - connected, info = check_blockchain_connection() - if connected: - print(f"✓ Connected to AITBC Blockchain") - print(f" Latest block: {info}") - print(f" Node: http://127.0.0.1:9080") - else: - print(f"✗ Not connected: {info}") - - elif args.command == "list": - wallets = list_wallets() - if wallets: - print("Local wallets:") - for w in wallets: - print(f" {w['id']}: {w['address']}") - else: - print("No wallets found") - print(f"Wallet directory: {Path.home() / '.aitbc' / 'wallets'}") - - elif args.command == "balance": - result = get_balance(args.address) - if "error" in result: - print(f"Error: {result['error']}") - else: - balance = result.get("balance", 0) - print(f"Balance: {balance} AITBC") - - else: - parser.print_help() - -if __name__ == "__main__": - main() diff --git a/configs/systemd/aitbc-miner.service b/configs/systemd/aitbc-miner.service deleted file mode 100644 index 32696bd5..00000000 --- a/configs/systemd/aitbc-miner.service +++ /dev/null @@ -1,25 +0,0 @@ -[Unit] -Description=AITBC Miner Node -After=network-online.target -Wants=network-online.target - -[Service] -Type=simple -User=aitbc -Group=aitbc -WorkingDirectory=/opt/aitbc/apps/miner-node -EnvironmentFile=/opt/aitbc/apps/miner-node/.env -ExecStart=/opt/aitbc/apps/miner-node/.venv/bin/python -m aitbc_miner.main -Restart=always -RestartSec=3 -Nice=5 -IOSchedulingClass=best-effort -IOSchedulingPriority=6 -NoNewPrivileges=true 
-PrivateTmp=true -ProtectSystem=full -ProtectHome=true -ReadWritePaths=/opt/aitbc/apps/miner-node /var/log/aitbc - -[Install] -WantedBy=multi-user.target diff --git a/docs/.github/workflows/deploy-docs.yml b/docs/.github/workflows/deploy-docs.yml deleted file mode 100644 index 015cab5a..00000000 --- a/docs/.github/workflows/deploy-docs.yml +++ /dev/null @@ -1,115 +0,0 @@ -name: Deploy Documentation - -on: - push: - branches: [ main, develop ] - paths: [ 'docs/**' ] - pull_request: - branches: [ main ] - paths: [ 'docs/**' ] - workflow_dispatch: - -permissions: - contents: read - pages: write - id-token: write - -concurrency: - group: "pages" - cancel-in-progress: false - -jobs: - build: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Setup Python - uses: actions/setup-python@v4 - with: - python-version: '3.11' - - - name: Install dependencies - run: | - pip install -r docs/requirements.txt - - - name: Generate OpenAPI specs - run: | - cd docs - python scripts/generate_openapi.py - - - name: Build documentation - run: | - cd docs - mkdocs build --strict - - - name: Upload artifact - uses: actions/upload-pages-artifact@v2 - with: - path: docs/site - - deploy: - environment: - name: github-pages - url: ${{ steps.deployment.outputs.page_url }} - runs-on: ubuntu-latest - needs: build - if: github.ref == 'refs/heads/main' - steps: - - name: Deploy to GitHub Pages - id: deployment - uses: actions/deploy-pages@v2 - - # Deploy to staging for develop branch - deploy-staging: - runs-on: ubuntu-latest - needs: build - if: github.ref == 'refs/heads/develop' - steps: - - name: Deploy to Staging - uses: peaceiris/actions-gh-pages@v3 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./docs/site - destination_dir: staging - user_name: github-actions[bot] - user_email: github-actions[bot]@users.noreply.github.com - - # Deploy to production S3 - deploy-production: - runs-on: ubuntu-latest - needs: 
build - if: github.ref == 'refs/heads/main' - environment: production - steps: - - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v2 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: us-east-1 - - - name: Deploy to S3 - run: | - aws s3 sync docs/site/ s3://docs.aitbc.io/ --delete - aws cloudfront create-invalidation --distribution-id ${{ secrets.CLOUDFRONT_DISTRIBUTION_ID }} --paths "/*" - - # Notify on deployment - notify: - runs-on: ubuntu-latest - needs: [deploy, deploy-production] - if: always() - steps: - - name: Notify Discord - uses: rjstone/discord-webhook-notify@v1 - with: - severity: info - text: "Documentation deployment completed" - description: | - Build: ${{ needs.build.result }} - Deploy: ${{ needs.deploy.result }} - Production: ${{ needs.deploy-production.result }} - webhookUrl: ${{ secrets.DISCORD_WEBHOOK }} diff --git a/docs/.pages b/docs/.pages deleted file mode 100644 index 24a4cedd..00000000 --- a/docs/.pages +++ /dev/null @@ -1,87 +0,0 @@ -# .pages configuration for awesome-pages plugin - -home: index.md -format: standard -ordering: - asc: title - -sections: - - title: Getting Started - icon: material/rocket-launch - children: - - getting-started/introduction.md - - getting-started/quickstart.md - - getting-started/installation.md - - getting-started/architecture.md - - - title: User Guide - icon: material/account-group - children: - - user-guide/overview.md - - user-guide/creating-jobs.md - - user-guide/marketplace.md - - user-guide/explorer.md - - user-guide/wallet-management.md - - - title: Developer Guide - icon: material/code-tags - children: - - developer-guide/overview.md - - developer-guide/setup.md - - developer-guide/api-authentication.md - - title: SDKs - icon: material/package-variant - children: - - developer-guide/sdks/python.md - - developer-guide/sdks/javascript.md - - developer-guide/examples.md - - 
developer-guide/contributing.md - - - title: API Reference - icon: material/api - children: - - title: Coordinator API - icon: material/server - children: - - api/coordinator/overview.md - - api/coordinator/authentication.md - - api/coordinator/endpoints.md - - api/coordinator/openapi.md - - title: Blockchain Node API - icon: material/link-variant - children: - - api/blockchain/overview.md - - api/blockchain/websocket.md - - api/blockchain/jsonrpc.md - - api/blockchain/openapi.md - - title: Wallet Daemon API - icon: material/wallet - children: - - api/wallet/overview.md - - api/wallet/endpoints.md - - api/wallet/openapi.md - - - title: Operations - icon: material/cog - children: - - operations/deployment.md - - operations/monitoring.md - - operations/security.md - - operations/backup-restore.md - - operations/troubleshooting.md - - - title: Tutorials - icon: material/school - children: - - tutorials/building-dapp.md - - tutorials/mining-setup.md - - tutorials/running-node.md - - tutorials/integration-examples.md - - - title: Resources - icon: material/information - children: - - resources/glossary.md - - resources/faq.md - - resources/support.md - - resources/changelog.md diff --git a/docs/partner-integration.md b/docs/developer/integration/partner-integration.md similarity index 100% rename from docs/partner-integration.md rename to docs/developer/integration/partner-integration.md diff --git a/docs/skills-framework.md b/docs/developer/integration/skills-framework.md similarity index 100% rename from docs/skills-framework.md rename to docs/developer/integration/skills-framework.md diff --git a/docs/localhost-testing-scenario.md b/docs/developer/testing/localhost-testing-scenario.md similarity index 100% rename from docs/localhost-testing-scenario.md rename to docs/developer/testing/localhost-testing-scenario.md diff --git a/docs/done.md b/docs/done.md index 041cff41..67f77412 100644 --- a/docs/done.md +++ b/docs/done.md @@ -219,7 +219,7 @@ This document tracks 
components that have been successfully deployed and are ope - Documented common issues, troubleshooting, and performance metrics - ✅ **Documentation Updates** - - Updated `docs/localhost-testing-scenario.md` with CLI wrapper usage + - Updated `docs/developer/testing/localhost-testing-scenario.md` with CLI wrapper usage - Converted all examples to use localhost URLs (127.0.0.1) instead of production - Added host user paths and quick start commands - Documented complete testing workflow from setup to verification @@ -268,3 +268,31 @@ This document tracks components that have been successfully deployed and are ope - ✅ **Comprehensive .gitignore** - Expanded from 39 to 145 lines with organized sections - Added project-specific rules for coordinator, explorer, GPU miner + +### Repository File Audit & Cleanup +- ✅ **File Audit Document** (`docs/files.md`) + - Created comprehensive audit of all 849 repository files + - Categorized into Whitelist (60), Greylist (0), Placeholders (12), Removed (35) + - All greylist items resolved - no pending reviews + +- ✅ **Abandoned Folders Removed** (35 items total) + - `ecosystem*/` (4 folders), `enterprise-connectors/`, `research/` + - `apps/client-web/`, `apps/marketplace-ui/`, `apps/wallet-cli/` + - `apps/miner-node/`, `apps/miner-dashboard/` + - `packages/py/aitbc-core/`, `aitbc-p2p/`, `aitbc-scheduler/` + - `packages/js/ui-widgets/` + - `python-sdk/`, `windsurf/`, `configs/`, `docs/user-guide/`, `docs/bootstrap/` + - `api/`, `governance/`, `protocols/` + - 5 GPU miner variants, 3 extension variants + +- ✅ **Docs Folder Reorganization** + - Root now contains only: `done.md`, `files.md`, `roadmap.md` + - Created new subfolders: `_config/`, `reference/components/`, `reference/governance/` + - Created: `operator/deployment/`, `operator/migration/` + - Created: `developer/testing/`, `developer/integration/` + - Moved 25 files to appropriate subfolders + - Moved receipt spec: `protocols/receipts/spec.md` → 
`docs/reference/specs/receipt-spec.md` + +- ✅ **Roadmap Updates** + - Added Stage 19: Placeholder Content Development + - Added Stage 20: Technical Debt Remediation (blockchain-node, solidity-token, ZKReceiptVerifier) diff --git a/docs/files.md b/docs/files.md new file mode 100644 index 00000000..db8fc912 --- /dev/null +++ b/docs/files.md @@ -0,0 +1,317 @@ +# AITBC Repository File Audit + +This document categorizes all files and folders in the repository by their status: +- **Whitelist (✅)**: Active, up-to-date, essential +- **Greylist (⚠️)**: Uncertain status, may need review +- **Blacklist (❌)**: Legacy, unused, outdated, candidates for removal + +Last updated: 2026-01-24 + +--- + +## Whitelist ✅ (Active & Essential) + +### Core Applications (`apps/`) + +| Path | Status | Notes | +|------|--------|-------| +| `apps/coordinator-api/` | ✅ Active | Main API service, recently updated (Jan 2026) | +| `apps/explorer-web/` | ✅ Active | Blockchain explorer, recently updated | +| `apps/wallet-daemon/` | ✅ Active | Wallet service, deployed in production | +| `apps/trade-exchange/` | ✅ Active | Bitcoin exchange, deployed | +| `apps/zk-circuits/` | ✅ Active | ZK proof circuits, deployed | +| `apps/marketplace-web/` | ✅ Active | Marketplace frontend, deployed | + +### Scripts (`scripts/`) + +| Path | Status | Notes | +|------|--------|-------| +| `scripts/aitbc-cli.sh` | ✅ Active | Main CLI tool, heavily used | +| `scripts/gpu/gpu_miner_host.py` | ✅ Active | Production GPU miner | +| `scripts/gpu/gpu_miner_host_wrapper.sh` | ✅ Active | Systemd wrapper | +| `scripts/deploy/` | ✅ Active | Deployment scripts | +| `scripts/service/` | ✅ Active | Service management | +| `scripts/dev_services.sh` | ✅ Active | Local development | + +### Infrastructure (`infra/`, `systemd/`) + +| Path | Status | Notes | +|------|--------|-------| +| `infra/nginx/` | ✅ Active | Production nginx configs | +| `systemd/aitbc-host-gpu-miner.service` | ✅ Active | Production service | +| 
`systemd/coordinator-api.service` | ✅ Active | Production service | + +### Website (`website/`) + +| Path | Status | Notes | +|------|--------|-------| +| `website/docs/` | ✅ Active | HTML documentation, recently refactored | +| `website/docs/css/docs.css` | ✅ Active | Shared CSS (1232 lines) | +| `website/docs/js/theme.js` | ✅ Active | Theme toggle | +| `website/index.html` | ✅ Active | Main website | +| `website/dashboards/` | ✅ Active | Admin/miner dashboards | + +### Documentation (`docs/`) + +| Path | Status | Notes | +|------|--------|-------| +| `docs/done.md` | ✅ Active | Completion tracking | +| `docs/roadmap.md` | ✅ Active | Development roadmap | +| `docs/developer/testing/localhost-testing-scenario.md` | ✅ Active | Testing guide | +| `docs/reference/components/miner_node.md` | ✅ Active | Miner documentation | +| `docs/reference/components/coordinator_api.md` | ✅ Active | API documentation | +| `docs/developer/integration/skills-framework.md` | ✅ Active | Skills documentation | + +### Cascade Skills (`.windsurf/`) + +| Path | Status | Notes | +|------|--------|-------| +| `.windsurf/skills/blockchain-operations/` | ✅ Active | Node management skill | +| `.windsurf/skills/deploy-production/` | ✅ Active | Deployment skill | +| `.windsurf/workflows/` | ✅ Active | Workflow definitions | + +### CLI Tools (`cli/`) + +| Path | Status | Notes | +|------|--------|-------| +| `cli/client.py` | ✅ Active | Client CLI | +| `cli/miner.py` | ✅ Active | Miner CLI | +| `cli/wallet.py` | ✅ Active | Wallet CLI | +| `cli/test_ollama_gpu_provider.py` | ✅ Active | GPU testing | + +### Home Scripts (`home/`) + +| Path | Status | Notes | +|------|--------|-------| +| `home/client/` | ✅ Active | Client test scripts | +| `home/miner/` | ✅ Active | Miner test scripts | +| `home/quick_job.py` | ✅ Active | Quick job submission | +| `home/simple_job_flow.py` | ✅ Active | Job flow testing | + +### Plugins (`plugins/`) + +| Path | Status | Notes | +|------|--------|-------| +| 
`plugins/ollama/` | ✅ Active | Ollama integration | + +### Root Files + +| Path | Status | Notes | +|------|--------|-------| +| `README.md` | ✅ Active | Project readme | +| `LICENSE` | ✅ Active | License file | +| `.gitignore` | ✅ Active | Recently updated (145 lines) | +| `pyproject.toml` | ✅ Active | Python project config | +| `.editorconfig` | ✅ Active | Editor config | + +--- + +## Greylist ⚠️ (Needs Review) + +### Applications - Uncertain Status + +| Path | Status | Notes | +|------|--------|-------| +| `apps/blockchain-node/` | 📋 Planned | Has code, SQLModel issues - see roadmap Stage 20 | + +### Packages + +| Path | Status | Notes | +|------|--------|-------| +| `packages/solidity/aitbc-token/` | 📋 Planned | Smart contracts, deployment planned - see roadmap Stage 20 | + +### Scripts + +| Path | Status | Notes | +|------|--------|-------| +| `scripts/test/` | ✅ Keep | 7 test scripts, all current (Jan 2026) | + +### Documentation + +| Path | Status | Notes | +|------|--------|-------| +| `docs/developer/` | ✅ Keep | 6 markdown files | +| `docs/operator/` | ✅ Keep | 5 markdown files | +| `docs/user/` | ✅ Keep | 1 markdown file | +| `docs/tutorials/` | ✅ Keep | 3 markdown files | + +### Infrastructure + +| Path | Status | Notes | +|------|--------|-------| +| `infra/k8s/` | ✅ Keep | 5 yaml files (backup, cert-manager, netpol, sealed-secrets) | + +### Extensions + +| Path | Status | Notes | +|------|--------|-------| +| `extensions/aitbc-wallet-firefox/` | ✅ Keep | Firefox extension source (7 files) | +| `extensions/aitbc-wallet-firefox-v1.0.5.xpi` | ✅ Keep | Built extension package | + +### Other + +| Path | Status | Notes | +|------|--------|-------| +| `contracts/ZKReceiptVerifier.sol` | 📋 Planned | ZK verifier contract - see roadmap Stage 20 | +| `docs/reference/specs/receipt-spec.md` | ✅ Keep | Canonical receipt schema (moved from protocols/) | + +--- + +## Future Placeholders 📋 (Keep - Will Be Populated) + +These empty folders are intentional scaffolding 
for planned future work per the roadmap. + +| Path | Status | Roadmap Stage | +|------|--------|---------------| +| `docs/user/guides/` | 📋 Placeholder | Stage 5 - Documentation | +| `docs/developer/tutorials/` | 📋 Placeholder | Stage 5 - Documentation | +| `docs/reference/specs/` | 📋 Placeholder | Stage 5 - Documentation | +| `infra/terraform/environments/staging/` | 📋 Placeholder | Stage 5 - Infrastructure | +| `infra/terraform/environments/prod/` | 📋 Placeholder | Stage 5 - Infrastructure | +| `infra/helm/values/dev/` | 📋 Placeholder | Stage 5 - Infrastructure | +| `infra/helm/values/staging/` | 📋 Placeholder | Stage 5 - Infrastructure | +| `infra/helm/values/prod/` | 📋 Placeholder | Stage 5 - Infrastructure | +| `apps/coordinator-api/migrations/` | 📋 Placeholder | Alembic migrations | +| `apps/pool-hub/src/app/routers/` | 📋 Placeholder | Stage 3 - Pool Hub | +| `apps/pool-hub/src/app/registry/` | 📋 Placeholder | Stage 3 - Pool Hub | +| `apps/pool-hub/src/app/scoring/` | 📋 Placeholder | Stage 3 - Pool Hub | + +--- + +## Blacklist ❌ (Abandoned - Remove) + +### Abandoned Empty Folders (Created but never used) + +| Path | Status | Notes | +|------|--------|-------| +| `apps/client-web/src/` | ❌ Remove | Created Sep 2025, never implemented | +| `apps/client-web/public/` | ❌ Remove | Created Sep 2025, never implemented | +| `apps/marketplace-ui/` | ❌ Remove | Superseded by `marketplace-web` | +| `apps/wallet-cli/` | ❌ Remove | Superseded by `cli/wallet.py` | +| `packages/py/aitbc-core/src/` | ❌ Remove | Created Sep 2025, never implemented | +| `packages/py/aitbc-p2p/src/` | ❌ Remove | Created Sep 2025, never implemented | +| `packages/py/aitbc-scheduler/src/` | ❌ Remove | Created Sep 2025, never implemented | +| `packages/js/ui-widgets/src/` | ❌ Remove | Created Sep 2025, never implemented | +| `protocols/api/` | ❌ Remove | Never implemented | +| `protocols/payouts/` | ❌ Remove | Never implemented | +| `data/fixtures/` | ❌ Remove | Never populated | +| 
`data/samples/` | ❌ Remove | Never populated | +| `tools/mkdiagram/` | ❌ Remove | Never implemented | +| `examples/quickstart-client-python/` | ❌ Remove | Never implemented | +| `examples/quickstart-client-js/node/` | ❌ Remove | Never implemented | +| `examples/quickstart-client-js/browser/` | ❌ Remove | Never implemented | +| `examples/receipts-sign-verify/python/` | ❌ Remove | Never implemented | +| `examples/receipts-sign-verify/js/` | ❌ Remove | Never implemented | +| `scripts/env/` | ❌ Remove | Never populated | +| `windsurf/prompts/` | ❌ Remove | Superseded by `.windsurf/` | +| `windsurf/tasks/` | ❌ Remove | Superseded by `.windsurf/` | + +### Duplicate/Redundant Folders + +| Path | Status | Notes | +|------|--------|-------| +| `python-sdk/` | ❌ Duplicate | Duplicates `packages/py/aitbc-sdk/` | +| `windsurf/` | ❌ Duplicate | Superseded by `.windsurf/` | +| `configs/` | ❌ Duplicate | Empty subfolders, duplicates `infra/` and `systemd/` | +| `docs/user-guide/` | ❌ Duplicate | Duplicates `docs/user/` | + +### Ecosystem Folders (Scaffolded but Unused) + +| Path | Status | Notes | +|------|--------|-------| +| `ecosystem/` | ❌ Unused | Only has empty `academic/` subfolder | +| `ecosystem-analytics/` | ❌ Unused | Scaffolded Dec 2025, never used | +| `ecosystem-certification/` | ❌ Unused | Scaffolded Dec 2025, never used | +| `ecosystem-extensions/` | ❌ Unused | Only has template folder | +| `enterprise-connectors/` | ❌ Unused | Scaffolded Dec 2025, never used | + +### Research Folders (Scaffolded but Unused) + +| Path | Status | Notes | +|------|--------|-------| +| `research/autonomous-agents/` | ❌ Unused | Scaffolded, no active work | +| `research/consortium/` | ❌ Unused | Scaffolded, no active work | +| `research/prototypes/` | ❌ Unused | Scaffolded, no active work | +| `research/standards/` | ❌ Unused | Scaffolded, no active work | + +### Generated/Build Artifacts (Should be in .gitignore) + +| Path | Status | Notes | +|------|--------|-------| +| 
`packages/solidity/aitbc-token/typechain-types/` | ❌ Generated | Build artifact | +| `apps/explorer-web/dist/` | ❌ Generated | Build artifact | +| `logs/` | ❌ Generated | Runtime logs | + +--- + +## Summary Statistics + +| Category | Count | Action | +|----------|-------|--------| +| **Whitelist ✅** | ~60 items | Keep and maintain | +| **Greylist ⚠️** | 0 items | All resolved! | +| **Placeholders 📋** | 12 folders | Fill per roadmap Stage 19 | +| **Removed ❌** | 35 items | Cleaned up 2026-01-24 | + +### Completed Actions (2026-01-24) + +1. **Cleanup Done**: + - ✅ Removed 21 abandoned/duplicate folders + - ✅ Updated `.gitignore` with comprehensive rules + - ✅ Created this audit document + +2. **Additional Cleanup (2026-01-24)**: + - ✅ Removed `apps/miner-node/` (superseded by `scripts/gpu/`) + - ✅ Removed `apps/miner-dashboard/` (superseded by `website/dashboards/`) + - ✅ Removed `docs/bootstrap/` (empty) + - ✅ Removed 5 GPU miner variants (kept only `gpu_miner_host.py`) + - ✅ Removed 3 extension variants (kept only `aitbc-wallet-firefox/`) + +3. **Final Cleanup (2026-01-24)**: + - ✅ Removed `api/` folder (mock no longer needed - using live production) + - ✅ Removed `governance/` folder (too far in future) + - ✅ Removed `protocols/` folder (spec moved to docs/reference/specs/) + - ✅ Moved `protocols/receipts/spec.md` → `docs/reference/specs/receipt-spec.md` + - ✅ Added ZKReceiptVerifier and receipt spec to roadmap Stage 20 + +4. 
**Placeholder Plan** (see `roadmap.md` Stage 19): + - Q1 2026: Documentation folders (`docs/user/guides/`, `docs/developer/tutorials/`, `docs/reference/specs/`) + - Q2 2026: Infrastructure (`infra/terraform/`, `infra/helm/`) + - Q2 2026: Pool Hub components + +--- + +## Folder Structure Recommendation + +``` +aitbc/ +├── apps/ # Core applications +│ ├── coordinator-api/ # ✅ Keep +│ ├── explorer-web/ # ✅ Keep +│ ├── marketplace-web/ # ✅ Keep +│ ├── wallet-daemon/ # ✅ Keep +│ └── zk-circuits/ # ✅ Keep +├── cli/ # ✅ CLI tools +├── docs/ # ✅ Markdown documentation +├── infra/ # ✅ Infrastructure configs +├── packages/ # ✅ Keep (aitbc-crypto, aitbc-sdk, aitbc-token) +├── plugins/ # ✅ Keep (ollama) +├── scripts/ # ✅ Keep - organized +├── systemd/ # ✅ Keep +├── tests/ # ✅ Keep (e2e, integration, unit, security, load) +├── website/ # ✅ Keep +└── .windsurf/ # ✅ Keep +``` + +**Folders Removed (2026-01-24)**: +- ✅ `ecosystem*/` (all 4 folders) - removed +- ✅ `enterprise-connectors/` - removed +- ✅ `research/` - removed +- ✅ `python-sdk/` - removed (duplicate) +- ✅ `windsurf/` - removed (duplicate of `.windsurf/`) +- ✅ `configs/` - removed (duplicated `infra/`) +- ✅ Empty `apps/` subfolders - removed (client-web, marketplace-ui, wallet-cli) +- ✅ Empty `packages/` subfolders - removed (aitbc-core, aitbc-p2p, aitbc-scheduler, ui-widgets) +- ✅ Empty `examples/` subfolders - removed +- ✅ `tools/` - removed (empty) +- ✅ `docs/user-guide/` - removed (duplicate) diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml deleted file mode 100644 index dc58dcb9..00000000 --- a/docs/mkdocs.yml +++ /dev/null @@ -1,204 +0,0 @@ -site_name: AITBC Documentation -site_description: AI Trusted Blockchain Computing Platform Documentation -site_author: AITBC Team -site_url: https://docs.aitbc.io - -# Repository -repo_name: aitbc/docs -repo_url: https://github.com/aitbc/docs -edit_uri: edit/main/docs/ - -# Copyright -copyright: Copyright © 2024 AITBC Team - -# Configuration -theme: - name: material - language: 
en - features: - - announce.dismiss - - content.action.edit - - content.action.view - - content.code.annotate - - content.code.copy - - content.tabs.link - - content.tooltips - - header.autohide - - navigation.expand - - navigation.footer - - navigation.indexes - - navigation.instant - - navigation.instant.prefetch - - navigation.instant.progress - - navigation.instant.scroll - - navigation.prune - - navigation.sections - - navigation.tabs - - navigation.tabs.sticky - - navigation.top - - navigation.tracking - - search.highlight - - search.share - - search.suggest - - toc.follow - - toc.integrate - palette: - - scheme: default - primary: blue - accent: blue - toggle: - icon: material/brightness-7 - name: Switch to dark mode - - scheme: slate - primary: blue - accent: blue - toggle: - icon: material/brightness-4 - name: Switch to light mode - font: - text: Roboto - code: Roboto Mono - favicon: assets/favicon.png - logo: assets/logo.png - -# Plugins -plugins: - - search: - separator: '[\s\-,:!=\[\]()"/]+|(?!\b)(?=[A-Z][a-z])|\.(?!\d)|&[lg]t;' - - minify: - minify_html: true - - git-revision-date-localized: - enable_creation_date: true - type: datetime - timezone: UTC - - awesome-pages - - glightbox - - mkdocs-video - - social: - cards_layout_options: - font_family: Roboto - -# Customization -extra: - analytics: - provider: google - property: !ENV GOOGLE_ANALYTICS_KEY - social: - - icon: fontawesome/brands/github - link: https://github.com/aitbc - - icon: fontawesome/brands/twitter - link: https://twitter.com/aitbc - - icon: fontawesome/brands/discord - link: https://discord.gg/aitbc - version: - provider: mike - default: stable - generator: false - -# Extensions -markdown_extensions: - - abbr - - admonition - - attr_list - - def_list - - footnotes - - md_in_html - - toc: - permalink: true - - pymdownx.arithmatex: - generic: true - - pymdownx.betterem: - smart_enable: all - - pymdownx.caret - - pymdownx.details - - pymdownx.emoji: - emoji_generator: 
!!python/name:material.extensions.emoji.to_svg - emoji_index: !!python/name:material.extensions.emoji.twemoji - - pymdownx.highlight: - anchor_linenums: true - line_spans: __span - pygments_lang_class: true - - pymdownx.inlinehilite - - pymdownx.keys - - pymdownx.magiclink: - repo_url_shorthand: true - user: aitbc - repo: docs - - pymdownx.mark - - pymdownx.smartsymbols - - pymdownx.superfences: - custom_fences: - - name: mermaid - class: mermaid - format: !!python/name:pymdownx.superfences.fence_code_format - - pymdownx.tabbed: - alternate_style: true - - pymdownx.tasklist: - custom_checkbox: true - - pymdownx.tilde - -# Navigation -nav: - - Home: index.md - - Getting Started: - - Introduction: getting-started/introduction.md - - Quickstart: getting-started/quickstart.md - - Installation: getting-started/installation.md - - Architecture: getting-started/architecture.md - - User Guide: - - Overview: user-guide/overview.md - - Trade Exchange: trade_exchange.md - - Zero-Knowledge Applications: zk-applications.md - - Creating Jobs: user-guide/creating-jobs.md - - Marketplace: user-guide/marketplace.md - - Explorer: user-guide/explorer.md - - Wallet Management: user-guide/wallet-management.md - - Developer Guide: - - Overview: developer-guide/overview.md - - Setup: developer-guide/setup.md - - API Authentication: developer-guide/api-authentication.md - - SDKs: - - Python SDK: developer-guide/sdks/python.md - - JavaScript SDK: developer-guide/sdks/javascript.md - - Examples: developer-guide/examples.md - - Contributing: developer-guide/contributing.md - - API Reference: - - Coordinator API: - - Overview: api/coordinator/overview.md - - Authentication: api/coordinator/authentication.md - - Endpoints: api/coordinator/endpoints.md - - OpenAPI Spec: api/coordinator/openapi.md - - ZK Applications API: - - Overview: api/zk/overview.md - - Endpoints: api/zk/endpoints.md - - Circuits: api/zk/circuits.md - - OpenAPI Spec: api/zk/openapi.md - - Blockchain Node API: - - Overview: 
api/blockchain/overview.md - - WebSocket API: api/blockchain/websocket.md - - JSON-RPC API: api/blockchain/jsonrpc.md - - OpenAPI Spec: api/blockchain/openapi.md - - Wallet Daemon API: - - Overview: api/wallet/overview.md - - Endpoints: api/wallet/endpoints.md - - OpenAPI Spec: api/wallet/openapi.md - - Operations: - - Deployment: operations/deployment.md - - Monitoring: operations/monitoring.md - - Security: operations/security.md - - Backup & Restore: operations/backup-restore.md - - Troubleshooting: operations/troubleshooting.md - - Tutorials: - - Building a DApp: tutorials/building-dapp.md - - Mining Setup: tutorials/mining-setup.md - - Running a Node: tutorials/running-node.md - - Integration Examples: tutorials/integration-examples.md - - Resources: - - Glossary: resources/glossary.md - - FAQ: resources/faq.md - - Support: resources/support.md - - Changelog: resources/changelog.md - -# Page tree -plugins: - - awesome-pages diff --git a/docs/README-CONTAINER-DEPLOYMENT.md b/docs/operator/deployment/README-CONTAINER-DEPLOYMENT.md similarity index 100% rename from docs/README-CONTAINER-DEPLOYMENT.md rename to docs/operator/deployment/README-CONTAINER-DEPLOYMENT.md diff --git a/docs/README-DOMAIN-DEPLOYMENT.md b/docs/operator/deployment/README-DOMAIN-DEPLOYMENT.md similarity index 100% rename from docs/README-DOMAIN-DEPLOYMENT.md rename to docs/operator/deployment/README-DOMAIN-DEPLOYMENT.md diff --git a/docs/nginx-domain-setup.md b/docs/operator/deployment/nginx-domain-setup.md similarity index 100% rename from docs/nginx-domain-setup.md rename to docs/operator/deployment/nginx-domain-setup.md diff --git a/docs/simple-domain-solution.md b/docs/operator/deployment/simple-domain-solution.md similarity index 100% rename from docs/simple-domain-solution.md rename to docs/operator/deployment/simple-domain-solution.md diff --git a/docs/systemd_services.md b/docs/operator/deployment/systemd_services.md similarity index 100% rename from docs/systemd_services.md rename 
to docs/operator/deployment/systemd_services.md diff --git a/docs/coordinator_postgresql_migration.md b/docs/operator/migration/coordinator_postgresql_migration.md similarity index 100% rename from docs/coordinator_postgresql_migration.md rename to docs/operator/migration/coordinator_postgresql_migration.md diff --git a/docs/postgresql_migration.md b/docs/operator/migration/postgresql_migration.md similarity index 100% rename from docs/postgresql_migration.md rename to docs/operator/migration/postgresql_migration.md diff --git a/docs/wallet_daemon_postgresql_migration.md b/docs/operator/migration/wallet_daemon_postgresql_migration.md similarity index 100% rename from docs/wallet_daemon_postgresql_migration.md rename to docs/operator/migration/wallet_daemon_postgresql_migration.md diff --git a/docs/blockchain_node.md b/docs/reference/components/blockchain_node.md similarity index 100% rename from docs/blockchain_node.md rename to docs/reference/components/blockchain_node.md diff --git a/docs/coordinator_api.md b/docs/reference/components/coordinator_api.md similarity index 100% rename from docs/coordinator_api.md rename to docs/reference/components/coordinator_api.md diff --git a/docs/explorer_web.md b/docs/reference/components/explorer_web.md similarity index 100% rename from docs/explorer_web.md rename to docs/reference/components/explorer_web.md diff --git a/docs/marketplace_web.md b/docs/reference/components/marketplace_web.md similarity index 100% rename from docs/marketplace_web.md rename to docs/reference/components/marketplace_web.md diff --git a/docs/miner.md b/docs/reference/components/miner.md similarity index 100% rename from docs/miner.md rename to docs/reference/components/miner.md diff --git a/docs/miner_node.md b/docs/reference/components/miner_node.md similarity index 100% rename from docs/miner_node.md rename to docs/reference/components/miner_node.md diff --git a/docs/pool_hub.md b/docs/reference/components/pool_hub.md similarity index 100% rename 
from docs/pool_hub.md rename to docs/reference/components/pool_hub.md diff --git a/docs/trade_exchange.md b/docs/reference/components/trade_exchange.md similarity index 100% rename from docs/trade_exchange.md rename to docs/reference/components/trade_exchange.md diff --git a/docs/wallet_daemon.md b/docs/reference/components/wallet_daemon.md similarity index 100% rename from docs/wallet_daemon.md rename to docs/reference/components/wallet_daemon.md diff --git a/docs/zk-applications.md b/docs/reference/components/zk-applications.md similarity index 100% rename from docs/zk-applications.md rename to docs/reference/components/zk-applications.md diff --git a/docs/reference/done.md b/docs/reference/done.md deleted file mode 100644 index 1b5d94c3..00000000 --- a/docs/reference/done.md +++ /dev/null @@ -1,205 +0,0 @@ -# Completed Bootstrap Tasks - -## Repository Initialization - -- Scaffolded core monorepo directories reflected in `docs/bootstrap/dirs.md`. -- Added top-level config files: `.editorconfig`, `.gitignore`, `LICENSE`, and root `README.md`. -- Created Windsurf workspace metadata under `windsurf/`. - -## Documentation - -- Authored `docs/roadmap.md` capturing staged development targets. -- Added README placeholders for primary apps under `apps/` to outline purpose and setup notes. - -## Coordinator API - -- Implemented SQLModel-backed job persistence and service layer in `apps/coordinator-api/src/app/`. -- Wired client, miner, and admin routers to coordinator services (job lifecycle, scheduling, stats). -- Added initial pytest coverage under `apps/coordinator-api/tests/test_jobs.py`. -- Added signed receipt generation, persistence (`Job.receipt`, `JobReceipt` history table), retrieval endpoints, telemetry metrics, and optional coordinator attestations. -- Persisted historical receipts via `JobReceipt`; exposed `/v1/jobs/{job_id}/receipts` endpoint and integrated canonical serialization. 
-- Documented receipt attestation configuration (`RECEIPT_ATTESTATION_KEY_HEX`) in `docs/run.md` and coordinator README. - -## Miner Node - -- Created coordinator client, control loop, and capability/backoff utilities in `apps/miner-node/src/aitbc_miner/`. -- Implemented CLI/Python runners and execution pipeline with result reporting. -- Added starter tests for runners in `apps/miner-node/tests/test_runners.py`. - -## Blockchain Node - -- Added websocket fan-out, disconnect cleanup, and load-test coverage in `apps/blockchain-node/tests/test_websocket.py`, ensuring gossip topics deliver reliably to multiple subscribers. - -## Directory Preparation - -- Established scaffolds for Python and JavaScript packages in `packages/py/` and `packages/js/`. -- Seeded example project directories under `examples/` for quickstart clients and receipt verification. -- Added `examples/receipts-sign-verify/fetch_and_verify.py` demonstrating coordinator receipt fetching + verification using Python SDK. - -## Python SDK - -- Created `packages/py/aitbc-sdk/` with coordinator receipt client and verification helpers consuming `aitbc_crypto` utilities. -- Added pytest coverage under `packages/py/aitbc-sdk/tests/test_receipts.py` validating miner/coordinator signature checks and client behavior. - -## Wallet Daemon - -- Added `apps/wallet-daemon/src/app/receipts/service.py` providing `ReceiptVerifierService` that fetches and validates receipts via `aitbc_sdk`. -- Created unit tests under `apps/wallet-daemon/tests/test_receipts.py` verifying service behavior. -- Implemented wallet SDK receipt ingestion + attestation surfacing in `packages/py/aitbc-sdk/src/receipts.py`, including pagination client, signature verification, and failure diagnostics with full pytest coverage. -- Hardened REST API by wiring dependency overrides in `apps/wallet-daemon/tests/test_wallet_api.py`, expanding workflow coverage (create/list/unlock/sign) and enforcing structured password policy errors consumed in CI. 
- -## Explorer Web - -- Initialized a Vite + TypeScript scaffold in `apps/explorer-web/` with `vite.config.ts`, `tsconfig.json`, and placeholder `src/main.ts` content. -- Installed frontend dependencies locally to unblock editor tooling and TypeScript type resolution. -- Implemented `overview` page stats rendering backed by mock block/transaction/receipt fetchers, including robust empty-state handling and TypeScript type fixes. - -## Pool Hub - -- Implemented FastAPI service scaffolding with Redis/PostgreSQL-backed repositories, match/health/metrics endpoints, and Prometheus instrumentation (`apps/pool-hub/src/poolhub/`). -- Added Alembic migrations (`apps/pool-hub/migrations/`) and async integration tests covering repositories and endpoints (`apps/pool-hub/tests/`). - -## Solidity Token - -- Implemented attested minting logic in `packages/solidity/aitbc-token/contracts/AIToken.sol` using `AccessControl` role gates and ECDSA signature recovery. -- Added Hardhat unit tests in `packages/solidity/aitbc-token/test/aitoken.test.ts` covering successful minting, replay prevention, and invalid attestor signatures. -- Configured project TypeScript settings via `packages/solidity/aitbc-token/tsconfig.json` to align Hardhat, Node, and Mocha typings for the contract test suite. - -## JavaScript SDK - -- Delivered fetch-based client wrapper with TypeScript definitions and Vitest coverage under `packages/js/aitbc-sdk/`. - -## Blockchain Node Enhancements - -- Added comprehensive WebSocket tests for blocks and transactions streams including multi-subscriber and high-volume scenarios. -- Extended PoA consensus with per-proposer block metrics and rotation tracking. -- Added latest block interval gauge and RPC error spike alerting. -- Enhanced observability with Grafana dashboards for blockchain node and coordinator overview. -- Implemented marketplace endpoints in coordinator API with explorer and marketplace routers. 
-- Added mock coordinator integration with enhanced telemetry capabilities. -- Created comprehensive observability documentation and alerting rules. - -## Explorer Web Production Readiness - -- Implemented Playwright end-to-end tests for live mode functionality. -- Enhanced responsive design with improved CSS layout system. -- Added comprehensive error handling and fallback mechanisms for live API responses. -- Integrated live coordinator endpoints with proper data reconciliation. - -## Marketplace Web Launch - -- Completed auth/session scaffolding for marketplace actions. -- Implemented API abstraction layer with mock/live mode toggle. -- Connected mock listings and bids to coordinator data sources. -- Added feature flags for controlled live mode rollout. - -## Cross-Chain Settlement - -- Implemented cross-chain settlement hooks with external bridges. -- Created BridgeAdapter interface for LayerZero integration. -- Implemented BridgeManager for orchestration and retry logic. -- Added settlement storage and API endpoints. -- Created cross-chain settlement documentation. - -## Python SDK Transport Abstraction - -- Designed pluggable transport abstraction layer for multi-network support. -- Implemented base Transport interface with HTTP/WebSocket transports. -- Created MultiNetworkClient for managing multiple blockchain networks. -- Updated AITBCClient to use transport abstraction with backward compatibility. -- Added transport documentation and examples. - -## GPU Service Provider Configuration - -- Extended Miner model to include service configurations. -- Created service configuration API endpoints in pool-hub. -- Built HTML/JS UI for service provider configuration. -- Added service pricing configuration and capability validation. -- Implemented service selection for GPU providers. - -## GPU Service Expansion - -- Implemented dynamic service registry framework for 30+ GPU services. 
-- Created service definitions for 6 categories: AI/ML, Media Processing, Scientific Computing, Data Analytics, Gaming, Development Tools. -- Built comprehensive service registry API with validation and discovery. -- Added hardware requirement checking and pricing models. -- Updated roadmap with service expansion phase documentation. - -## Stage 7 - GPU Service Expansion & Privacy Features - -### GPU Service Infrastructure -- Create dynamic service registry with JSON schema validation -- Implement service provider configuration UI with dynamic service selection -- Create service definitions for AI/ML (LLM inference, image/video generation, speech recognition, computer vision, recommendation systems) -- Create service definitions for Media Processing (video transcoding, streaming, 3D rendering, image/audio processing) -- Create service definitions for Scientific Computing (molecular dynamics, weather modeling, financial modeling, physics simulation, bioinformatics) -- Create service definitions for Data Analytics (big data processing, real-time analytics, graph analytics, time series analysis) -- Create service definitions for Gaming & Entertainment (cloud gaming, asset baking, physics simulation, VR/AR rendering) -- Create service definitions for Development Tools (GPU compilation, model training, data processing, simulation testing, code generation) -- Implement service-specific validation and hardware requirement checking - -### Privacy & Cryptography Features -- ✅ Research zk-proof-based receipt attestation and prototype a privacy-preserving settlement flow -- ✅ Implement Groth16 ZK circuit for receipt hash preimage proofs -- ✅ Create ZK proof generation service in coordinator API -- ✅ Implement on-chain verification contract (ZKReceiptVerifier.sol) -- ✅ Add confidential transaction support with opt-in ciphertext storage -- ✅ Implement HSM-backed key management (Azure Key Vault, AWS KMS, Software) -- ✅ Create hybrid encryption system (AES-256-GCM + X25519) -- ✅ 
Implement role-based access control with time restrictions -- ✅ Create tamper-evident audit logging with chain of hashes -- ✅ Publish comprehensive threat modeling with STRIDE analysis -- ✅ Update cross-chain settlement hooks for ZK proofs and privacy levels - -### Enterprise Integration Features -- ✅ Deliver reference connectors for ERP/payment systems with Python SDK -- ✅ Implement Stripe payment connector with full charge/refund/subscription support -- ✅ Create enterprise-grade Python SDK with async support, dependency injection, metrics -- ✅ Build ERP connector base classes with plugin architecture for protocols -- ✅ Document comprehensive SLAs with uptime guarantees and support commitments -- ✅ Stand up multi-tenant coordinator infrastructure with per-tenant isolation -- ✅ Implement tenant management service with lifecycle operations -- ✅ Create tenant context middleware for automatic tenant identification -- ✅ Build resource quota enforcement with Redis-backed caching -- ✅ Create usage tracking and billing metrics with tiered pricing -- ✅ Launch ecosystem certification program with SDK conformance testing -- ✅ Define Bronze/Silver/Gold certification tiers with clear requirements -- ✅ Build language-agnostic test suite with OpenAPI contract validation -- ✅ Implement security validation framework with dependency scanning -- ✅ Design public registry API for partner/SDK discovery -- ✅ Validate certification system with Stripe connector certification - -### Community & Governance Features -- ✅ Establish open RFC process with clear stages and review criteria -- ✅ Create governance website with documentation and navigation -- ✅ Set up community call schedule with multiple call types -- ✅ Design RFC template and GitHub PR template for submissions -- ✅ Implement benevolent dictator model with sunset clause -- ✅ Create hybrid governance structure (GitHub + Discord + Website) -- ✅ Document participation guidelines and code of conduct -- ✅ Establish transparency and 
accountability processes - -### Ecosystem Growth Initiatives -- ✅ Create hackathon organization framework with quarterly themes and bounty board -- ✅ Design grant program with hybrid approach (micro-grants + strategic grants) -- ✅ Build marketplace extension SDK with cookiecutter templates -- ✅ Create analytics tooling for ecosystem metrics and KPI tracking -- ✅ Track ecosystem KPIs (active marketplaces, cross-chain volume) and feed them into quarterly strategy reviews -- ✅ Establish judging criteria with ecosystem impact weighting -- ✅ Create sponsor partnership framework with tiered benefits -- ✅ Design retroactive grants for proven projects -- ✅ Implement milestone-based disbursement for accountability - -### Stage 8 - Frontier R&D & Global Expansion -- ✅ Launch research consortium framework with governance model and membership tiers -- ✅ Develop hybrid PoA/PoS consensus research plan with 12-month implementation timeline -- ✅ Create scaling research plan for sharding and rollups (100K+ TPS target) -- ✅ Design ZK applications research plan for privacy-preserving AI -- ✅ Create governance research plan with liquid democracy and AI assistance -- ✅ Develop economic models research plan with sustainable tokenomics -- ✅ Implement hybrid consensus prototype demonstrating dynamic mode switching -- ✅ Create executive summary for consortium recruitment -- ✅ Prototype sharding architecture with beacon chain coordination -- ✅ Implement ZK-rollup prototype for transaction batching -- ⏳ Set up consortium legal structure and operational infrastructure -- ⏳ Recruit founding members from industry and academia diff --git a/docs/governance.md b/docs/reference/governance/governance.md similarity index 100% rename from docs/governance.md rename to docs/reference/governance/governance.md diff --git a/docs/roadmap-retrospective-template.md b/docs/reference/governance/roadmap-retrospective-template.md similarity index 100% rename from docs/roadmap-retrospective-template.md rename to 
docs/reference/governance/roadmap-retrospective-template.md diff --git a/docs/transparency-report-template.md b/docs/reference/governance/transparency-report-template.md similarity index 100% rename from docs/transparency-report-template.md rename to docs/reference/governance/transparency-report-template.md diff --git a/docs/reference/roadmap.md b/docs/reference/roadmap.md deleted file mode 100644 index 8749bccb..00000000 --- a/docs/reference/roadmap.md +++ /dev/null @@ -1,458 +0,0 @@ -# AITBC Development Roadmap - -This roadmap aggregates high-priority tasks derived from the bootstrap specifications in `docs/bootstrap/` and tracks progress across the monorepo. Update this document as milestones evolve. - -## Stage 1 — Upcoming Focus Areas [COMPLETED: 2025-12-22] - -- **Blockchain Node Foundations** - - ✅ Bootstrap module layout in `apps/blockchain-node/src/`. - - ✅ Implement SQLModel schemas and RPC stubs aligned with historical/attested receipts. - -- **Explorer Web Enablement** - - ✅ Finish mock integration across all pages and polish styling + mock/live toggle. - - ✅ Begin wiring coordinator endpoints (e.g., `/v1/jobs/{job_id}/receipts`). - -- **Marketplace Web Scaffolding** - - ✅ Scaffold Vite/vanilla frontends consuming coordinator receipt history endpoints and SDK examples. - -- **Pool Hub Services** - - ✅ Initialize FastAPI project, scoring registry, and telemetry ingestion hooks leveraging coordinator/miner metrics. - -- **CI Enhancements** - - ✅ Add blockchain-node tests once available and frontend build/lint checks to `.github/workflows/python-tests.yml` or follow-on workflows. - - ✅ Provide systemd unit + installer scripts under `scripts/` for streamlined deployment. - -## Stage 2 — Core Services (MVP) [COMPLETED: 2025-12-22] - -- **Coordinator API** - - ✅ Scaffold FastAPI project (`apps/coordinator-api/src/app/`). - - ✅ Implement job submission, status, result endpoints. - - ✅ Add miner registration, heartbeat, poll, result routes. 
- - ✅ Wire SQLite persistence for jobs, miners, receipts (historical `JobReceipt` table). - - ✅ Provide `.env.example`, `pyproject.toml`, and run scripts. - - ✅ Deploy minimal version in container with nginx proxy - -- **Miner Node** - - ✅ Implement capability probe and control loop (register → heartbeat → fetch jobs). - - ✅ Build CLI and Python runners with sandboxed work dirs (result reporting stubbed to coordinator). - -- **Blockchain Node** - - ✅ Define SQLModel schema for blocks, transactions, accounts, receipts (`apps/blockchain-node/src/aitbc_chain/models.py`). - - ✅ Harden schema parity across runtime + storage: - - Alembic baseline + follow-on migrations in `apps/blockchain-node/migrations/` now track the SQLModel schema (blocks, transactions, receipts, accounts). - - Added `Relationship` + `ForeignKey` wiring in `apps/blockchain-node/src/aitbc_chain/models.py` for block ↔ transaction ↔ receipt joins. - - Introduced hex/enum validation hooks via Pydantic validators to ensure hash integrity and safe persistence. - - ✅ Implement PoA proposer loop with block assembly (`apps/blockchain-node/src/aitbc_chain/consensus/poa.py`). - - ✅ Expose REST RPC endpoints for tx submission, balances, receipts (`apps/blockchain-node/src/aitbc_chain/rpc/router.py`). - - ✅ Deliver WebSocket RPC + P2P gossip layer: - - ✅ Stand up WebSocket subscription endpoints (`apps/blockchain-node/src/aitbc_chain/rpc/websocket.py`) mirroring REST payloads. - - ✅ Implement pub/sub transport for block + transaction gossip backed by an in-memory broker (Starlette `Broadcast` or Redis) with configurable fan-out. - - ✅ Add integration tests and load-test harness ensuring gossip convergence and back-pressure handling. - - ✅ Ship devnet scripts (`apps/blockchain-node/scripts/`). - - ✅ Add observability hooks (JSON logging, Prometheus metrics) and integrate coordinator mock into devnet tooling. 
- - ✅ Expand observability dashboards + miner mock integration: - - Build Grafana dashboards for consensus health (block intervals, proposer rotation) and RPC latency (`apps/blockchain-node/observability/`). - - Expose miner mock telemetry (job throughput, error rates) via shared Prometheus registry and ingest into blockchain-node dashboards. - - Add alerting rules (Prometheus `Alertmanager`) for stalled proposers, queue saturation, and miner mock disconnects. - - Wire coordinator mock into devnet tooling to simulate real-world load and validate observability hooks. - -- **Receipt Schema** - - ✅ Finalize canonical JSON receipt format under `protocols/receipts/` (includes sample signed receipts). - - ✅ Implement signing/verification helpers in `packages/py/aitbc-crypto` (JS SDK pending). - - ✅ Translate `docs/bootstrap/aitbc_tech_plan.md` contract skeleton into Solidity project (`packages/solidity/aitbc-token/`). - - ✅ Add deployment/test scripts and document minting flow (`packages/solidity/aitbc-token/scripts/` and `docs/run.md`). - -- **Wallet Daemon** - - ✅ Implement encrypted keystore (Argon2id + XChaCha20-Poly1305) via `KeystoreService`. - - ✅ Provide REST and JSON-RPC endpoints for wallet management and signing (`api_rest.py`, `api_jsonrpc.py`). - - ✅ Add mock ledger adapter with SQLite backend powering event history (`ledger_mock/`). - - ✅ Integrate Python receipt verification helpers (`aitbc_sdk`) and expose API/service utilities validating miner + coordinator signatures. - - ✅ Harden REST API workflows (create/list/unlock/sign) with structured password policy enforcement and deterministic pytest coverage in `apps/wallet-daemon/tests/test_wallet_api.py`. - - ✅ Implement Wallet SDK receipt ingestion + attestation surfacing: - - Added `/v1/jobs/{job_id}/receipts` client helpers with cursor pagination, retry/backoff, and summary reporting (`packages/py/aitbc-sdk/src/receipts.py`). 
- - Reused crypto helpers to validate miner and coordinator signatures, capturing per-key failure reasons for downstream UX. - - Surfaced aggregated attestation status (`ReceiptStatus`) and failure diagnostics for SDK + UI consumers; JS helper parity still planned. - -## Stage 3 — Pool Hub & Marketplace [COMPLETED: 2025-12-22] - -- **Pool Hub** - - ✅ Implement miner registry, scoring engine, and `/v1/match` API with Redis/PostgreSQL backing stores. - - ✅ Add observability endpoints (`/v1/health`, `/v1/metrics`) plus Prometheus instrumentation and integration tests. - -- **Marketplace Web** - - ✅ Initialize Vite project with vanilla TypeScript (`apps/marketplace-web/`). - - ✅ Build offer list, bid form, and stats cards powered by mock data fixtures (`public/mock/`). - - ✅ Provide API abstraction toggling mock/live mode (`src/lib/api.ts`) and wire coordinator endpoints. - - ✅ Validate live mode against coordinator `/v1/marketplace/*` responses and add auth feature flags for rollout. - - ✅ Deploy to production at https://aitbc.bubuit.net/marketplace/ - -- **Explorer Web** - - ✅ Initialize Vite + TypeScript project scaffold (`apps/explorer-web/`). - - ✅ Add routed pages for overview, blocks, transactions, addresses, receipts. - - ✅ Seed mock datasets (`public/mock/`) and fetch helpers powering overview + blocks tables. - - ✅ Extend mock integrations to transactions, addresses, and receipts pages. - - ✅ Implement styling system, mock/live data toggle, and coordinator API wiring scaffold. - - ✅ Render overview stats from mock block/transaction/receipt summaries with graceful empty-state fallbacks. - - ✅ Validate live mode + responsive polish: - - Hit live coordinator endpoints via nginx (`/api/explorer/blocks`, `/api/explorer/transactions`, `/api/explorer/addresses`, `/api/explorer/receipts`) via `getDataMode() === "live"` and reconcile payloads with UI models. - - Add fallbacks + error surfacing for partial/failed live responses (toast + console diagnostics). 
- - Audit responsive breakpoints (`public/css/layout.css`) and adjust grid/typography for tablet + mobile; add regression checks in Percy/Playwright snapshots. - - ✅ Deploy to production at https://aitbc.bubuit.net/explorer/ with genesis block display - -## Stage 4 — Observability & Production Polish - -- **Observability & Telemetry** - - ✅ Build Grafana dashboards for PoA consensus health (block intervals, proposer rotation cadence) leveraging `poa_last_block_interval_seconds`, `poa_proposer_rotations_total`, and per-proposer counters. - - ✅ Surface RPC latency histograms/summaries for critical endpoints (`rpc_get_head`, `rpc_send_tx`, `rpc_submit_receipt`) and add Grafana panels with SLO thresholds. - - ✅ Ingest miner mock telemetry (job throughput, failure rate) into the shared Prometheus registry and wire panels/alerts that correlate miner health with consensus metrics. - -- **Explorer Web (Live Mode)** - - ✅ Finalize live `getDataMode() === "live"` workflow: align API payload contracts, render loading/error states, and persist mock/live toggle preference. - - ✅ Expand responsive testing (tablet/mobile) and add automated visual regression snapshots prior to launch. - - ✅ Integrate Playwright smoke tests covering overview, blocks, and transactions pages in live mode. - -- **Marketplace Web (Launch Readiness)** - - ✅ Connect mock listings/bids to coordinator data sources and provide feature flags for live mode rollout. - - ✅ Implement auth/session scaffolding for marketplace actions and document API assumptions in `apps/marketplace-web/README.md`. - - ✅ Add Grafana panels monitoring marketplace API throughput and error rates once endpoints are live. - -- **Operational Hardening** - - ✅ Extend Alertmanager rules to cover RPC error spikes, proposer stalls, and miner disconnects using the new metrics. - - ✅ Document dashboard import + alert deployment steps in `docs/run.md` for operators. 
- - ✅ Prepare Stage 3 release checklist linking dashboards, alerts, and smoke tests prior to production cutover. - - ✅ Enable host GPU miner with coordinator proxy routing and systemd-backed coordinator service; add proxy health timer. - -## Stage 5 — Scaling & Release Readiness - -- **Infrastructure Scaling** - - ✅ Benchmark blockchain node throughput under sustained load; capture CPU/memory targets and suggest horizontal scaling thresholds. - - ✅ Build Terraform/Helm templates for dev/staging/prod environments, including Prometheus/Grafana bundles. - - ✅ Implement autoscaling policies for coordinator, miners, and marketplace services with synthetic traffic tests. - -- **Reliability & Compliance** - - ✅ Formalize backup/restore procedures for PostgreSQL, Redis, and ledger storage with scheduled jobs. - - ✅ Complete security hardening review (TLS termination, API auth, secrets management) and document mitigations in `docs/security.md`. - - ✅ Add chaos testing scripts (network partition, coordinator outage) and track mean-time-to-recovery metrics. - -- **Product Launch Checklist** - - ✅ Finalize public documentation (API references, onboarding guides) and publish to the docs portal. - - ✅ Coordinate beta release timeline, including user acceptance testing of explorer/marketplace live modes. - - ✅ Establish post-launch monitoring playbooks and on-call rotations. - -## Stage 6 — Ecosystem Expansion - -- **Cross-Chain & Interop** - - ✅ Prototype cross-chain settlement hooks leveraging external bridges; document integration patterns. - - ✅ Extend SDKs (Python/JS) with pluggable transport abstractions for multi-network support. - - 🔄 Evaluate third-party explorer/analytics integrations and publish partner onboarding guides. - -- **Marketplace Growth** - - 🔄 Launch incentive programs (staking, liquidity mining) and expose telemetry dashboards tracking campaign performance. 
- - 🔄 Implement governance module (proposal voting, parameter changes) and add API/UX flows to explorer/marketplace. - - 🔄 Provide SLA-backed coordinator/pool hubs with capacity planning and billing instrumentation. - -- **Developer Experience** - - ✅ Publish advanced tutorials (custom proposers, marketplace extensions) and maintain versioned API docs. - - 🔄 Integrate CI/CD pipelines with canary deployments and blue/green release automation. - - 🔄 Host quarterly architecture reviews capturing lessons learned and feeding into roadmap revisions. - -## Stage 7 — Innovation & Ecosystem Services - -- **GPU Service Expansion** - - ✅ Implement dynamic service registry framework for 30+ GPU-accelerated services - - ✅ Create service definitions for AI/ML (LLM inference, image/video generation, speech recognition, computer vision, recommendation systems) - - ✅ Create service definitions for Media Processing (video transcoding, streaming, 3D rendering, image/audio processing) - - ✅ Create service definitions for Scientific Computing (molecular dynamics, weather modeling, financial modeling, physics simulation, bioinformatics) - - ✅ Create service definitions for Data Analytics (big data processing, real-time analytics, graph analytics, time series analysis) - - ✅ Create service definitions for Gaming & Entertainment (cloud gaming, asset baking, physics simulation, VR/AR rendering) - - ✅ Create service definitions for Development Tools (GPU compilation, model training, data processing, simulation testing, code generation) - - ✅ Deploy service provider configuration UI with dynamic service selection - - ✅ Implement service-specific validation and hardware requirement checking - -- **Advanced Cryptography & Privacy** - - ✅ Research zk-proof-based receipt attestation and prototype a privacy-preserving settlement flow. - - ✅ Add confidential transaction support with opt-in ciphertext storage and HSM-backed key management. 
- - ✅ Publish threat modeling updates and share mitigations with ecosystem partners. - -- **Enterprise Integrations** - - ✅ Deliver reference connectors for ERP/payment systems and document SLA expectations. - - ✅ Stand up multi-tenant coordinator infrastructure with per-tenant isolation and billing metrics. - - ✅ Launch ecosystem certification program (SDK conformance, security best practices) with public registry. - -- **Community & Governance** - - ✅ Establish open RFC process, publish governance website, and schedule regular community calls. - - ✅ Sponsor hackathons/accelerators and provide grants for marketplace extensions and analytics tooling. - - ✅ Track ecosystem KPIs (active marketplaces, cross-chain volume) and feed them into quarterly strategy reviews. - -## Stage 8 — Frontier R&D & Global Expansion [COMPLETED: 2025-12-28] - -- **Protocol Evolution** - - ✅ Launch research consortium exploring next-gen consensus (hybrid PoA/PoS) and finalize whitepapers. - - 🔄 Prototype sharding or rollup architectures to scale throughput beyond current limits. - - 🔄 Standardize interoperability specs with industry bodies and submit proposals for adoption. - -- **Global Rollout** - - 🔄 Establish regional infrastructure hubs (multi-cloud) with localized compliance and data residency guarantees. - - 🔄 Partner with regulators/enterprises to pilot regulated marketplaces and publish compliance playbooks. - - 🔄 Expand localization (UI, documentation, support) covering top target markets. - -- **Long-Term Sustainability** - - 🔄 Create sustainability fund for ecosystem maintenance, bug bounties, and community stewardship. - - 🔄 Define succession planning for core teams, including training programs and contributor pathways. - - 🔄 Publish bi-annual roadmap retrospectives assessing KPI alignment and revising long-term goals. 
- -## Stage 9 — Moonshot Initiatives [COMPLETED: 2025-12-28] - -- **Decentralized Infrastructure** - - 🔄 Transition coordinator/miner roles toward community-governed validator sets with incentive alignment. - - 🔄 Explore decentralized storage/backbone options (IPFS/Filecoin) for ledger and marketplace artifacts. - - 🔄 Prototype fully trustless marketplace settlement leveraging zero-knowledge rollups. - -- **AI & Automation** - - 🔄 Integrate AI-driven monitoring/anomaly detection for proposer health, market liquidity, and fraud detection. - - 🔄 Automate incident response playbooks with ChatOps and policy engines. - - 🔄 Launch research into autonomous agent participation (AI agents bidding/offering in the marketplace) and governance implications. -- **Global Standards Leadership** - - 🔄 Chair industry working groups defining receipt/marketplace interoperability standards. - - 🔄 Publish annual transparency reports and sustainability metrics for stakeholders. - - 🔄 Engage with academia and open-source foundations to steward long-term protocol evolution. - -### Stage 10 — Stewardship & Legacy Planning [COMPLETED: 2025-12-28] - -- **Open Governance Maturity** - - 🔄 Transition roadmap ownership to community-elected councils with transparent voting and treasury controls. - - 🔄 Codify constitutional documents (mission, values, conflict resolution) and publish public charters. - - 🔄 Implement on-chain governance modules for protocol upgrades and ecosystem-wide decisions. - -- **Educational & Outreach Programs** - - 🔄 Fund university partnerships, research chairs, and developer fellowships focused on decentralized marketplace tech. - - 🔄 Create certification tracks and mentorship programs for new validator/operators. - - 🔄 Launch annual global summit and publish proceedings to share best practices across partners. - -- **Long-Term Preservation** - - 🔄 Archive protocol specs, governance records, and cultural artifacts in decentralized storage with redundancy. 
- - 🔄 Establish legal/organizational frameworks to ensure continuity across jurisdictions. - - 🔄 Develop end-of-life/transition plans for legacy components, documenting deprecation strategies and migration tooling. - - -## Shared Libraries & Examples - -## Stage 11 — Trade Exchange & Token Economy [COMPLETED: 2025-12-28] - -- **Bitcoin Wallet Integration** - - ✅ Implement Bitcoin payment gateway for AITBC token purchases - - ✅ Create payment request API with unique payment addresses - - ✅ Add QR code generation for mobile payments - - ✅ Implement real-time payment monitoring with blockchain API - - ✅ Configure exchange rate: 1 BTC = 100,000 AITBC - -- **User Management System** - - ✅ Implement wallet-based authentication with session management - - ✅ Create individual user accounts with unique wallets - - ✅ Add user profile pages with transaction history - - ✅ Implement secure session tokens with 24-hour expiry - - ✅ Add login/logout functionality across all pages - -- **Trade Exchange Platform** - - ✅ Build responsive trading interface with real-time price updates - - ✅ Integrate Bitcoin payment flow with QR code display - - ✅ Add payment status monitoring and confirmation handling - - ✅ Implement AITBC token minting upon payment confirmation - - ✅ Deploy to production at https://aitbc.bubuit.net/Exchange/ - -- **API Infrastructure** - - ✅ Add user management endpoints (/api/users/*) - - ✅ Implement exchange payment endpoints (/api/exchange/*) - - ✅ Add session-based authentication for protected routes - - ✅ Create transaction history and balance tracking APIs - - ✅ Fix all import and syntax errors in coordinator API - -## Stage 13 — Explorer Live API & Reverse Proxy Fixes [COMPLETED: 2025-12-28] - -- **Explorer Live API** - - ✅ Enable coordinator explorer routes at `/v1/explorer/*`. - - ✅ Expose nginx explorer proxy at `/api/explorer/*` (maps to backend `/v1/explorer/*`). - - ✅ Fix response schema mismatches (e.g., receipts response uses `jobId`). 
- -- **Coordinator API Users/Login** - - ✅ Ensure `/v1/users/login` is registered and working. - - ✅ Fix missing SQLModel tables by initializing DB on startup (wallet/user tables created). - -- **nginx Reverse Proxy Hardening** - - ✅ Fix `/api/v1/*` routing to avoid double `/v1` prefix. - - ✅ Add compatibility proxy for Exchange: `/api/users/*` → backend `/v1/users/*`. - -## Stage 12 — Zero-Knowledge Proof Implementation [COMPLETED: 2025-12-28] - -- **Circom Compiler Setup** - - ✅ Install Circom compiler v2.2.3 on production server - - ✅ Configure Node.js environment for ZK circuit compilation - - ✅ Install circomlib and required dependencies - -- **ZK Circuit Development** - - ✅ Create receipt attestation circuit (receipt_simple.circom) - - ✅ Implement membership proof circuit template - - ✅ Implement bid range proof circuit template - - ✅ Compile circuits to R1CS, WASM, and symbolic files - -- **Trusted Setup Ceremony** - - ✅ Perform Powers of Tau setup ceremony (2^12) - - ✅ Generate proving keys (zkey) for Groth16 - - ✅ Export verification keys for on-chain verification - - ✅ Complete phase 2 preparation with contributions - -- **ZK Applications API** - - ✅ Implement identity commitment endpoints - - ✅ Create stealth address generation service - - ✅ Add private receipt attestation API - - ✅ Implement group membership proof verification - - ✅ Add private bidding functionality - - ✅ Create computation proof verification - - ✅ Deploy to production at /api/zk/ endpoints - -- **Integration & Deployment** - - ✅ Integrate ZK proof service with coordinator API - - ✅ Configure circuit files in production environment - - ✅ Enable ZK proof generation in coordinator service - - ✅ Update documentation with ZK capabilities - -## Stage 14 — Explorer JavaScript Error Fixes [COMPLETED: 2025-12-30] - -- **JavaScript Error Resolution** - - ✅ Fixed "can't access property 'length', t is undefined" error on Explorer page load - - ✅ Updated fetchMock function in mockData.ts to return 
correct structure with 'items' property - - ✅ Added defensive null checks in all page init functions (overview, blocks, transactions, addresses, receipts) - - ✅ Fixed TypeScript errors for null checks and missing properties - - ✅ Deployed fixes to production server (/var/www/aitbc.bubuit.net/explorer/) - - ✅ Configured mock data serving from correct path (/explorer/mock/) - -## Stage 15 — Cascade Skills Framework [COMPLETED: 2025-01-19] - -- **Skills Infrastructure** - - ✅ Implement Cascade skills framework for complex workflow automation - - ✅ Create skills directory structure at `.windsurf/skills/` - - ✅ Define skill metadata format with YAML frontmatter - - ✅ Add progressive disclosure for intelligent skill invocation - -- **Deploy-Production Skill** - - ✅ Create comprehensive deployment workflow skill - - ✅ Implement pre-deployment validation script (disk, memory, services, SSL) - - ✅ Add environment template with all production variables - - ✅ Create rollback procedures with emergency steps - - ✅ Build health check script for post-deployment verification - -- **Blockchain-Operations Skill** - - ✅ Create node health monitoring with peer analysis and sync status - - ✅ Implement transaction tracer for debugging and gas optimization - - ✅ Build GPU mining optimization script for NVIDIA/AMD cards - - ✅ Add real-time sync monitor with visual progress bar - - ✅ Create network diagnostics tool with connectivity analysis - -- **Skills Integration** - - ✅ Enable automatic skill invocation based on context - - ✅ Add manual skill triggering with keyword detection - - ✅ Implement error handling and logging in all skills - - ✅ Create comprehensive documentation and usage examples - -## Stage 16 — Service Maintenance & Optimization [COMPLETED: 2026-01-21] - -- **Service Recovery** - - ✅ Diagnose and fix all failing AITBC container services - - ✅ Resolve duplicate service conflicts causing port binding errors - - ✅ Fix marketplace service implementation (missing server.py) - 
- ✅ Disable redundant services to prevent resource conflicts - -- **System Administration** - - ✅ Configure passwordless SSH access for automation - - ✅ Create dedicated SSH keys for secure service management - - ✅ Document service dependencies and port mappings - - ✅ Establish service monitoring procedures - -- **Service Status Verification** - - ✅ Verify all 7 core services running correctly - - ✅ Confirm proper nginx reverse proxy configuration - - ✅ Validate API endpoints accessibility - - ✅ Test service recovery procedures - -## Stage 17 — Ollama GPU Inference & CLI Tooling [COMPLETED: 2026-01-24] - -- **End-to-End Ollama Testing** - - ✅ Verify complete GPU inference workflow from job submission to receipt generation - - ✅ Test Ollama integration with multiple models (llama3.2, mistral, deepseek, etc.) - - ✅ Validate job lifecycle: QUEUED → RUNNING → COMPLETED - - ✅ Confirm receipt generation with accurate payment calculations - - ✅ Record transactions on blockchain with proper metadata - -- **Coordinator API Bug Fixes** - - ✅ Fix missing `_coerce_float()` helper function causing 500 errors - - ✅ Deploy fix to production incus container via SSH - - ✅ Verify result submission returns 200 OK with valid receipts - - ✅ Validate receipt payload structure and signature generation - -- **Miner Configuration & Optimization** - - ✅ Fix miner ID mismatch (host-gpu-miner → REDACTED_MINER_KEY) - - ✅ Enhance logging with explicit flush handlers for systemd journal - - ✅ Configure unbuffered Python logging environment variables - - ✅ Create systemd service unit with proper environment configuration - -- **CLI Tooling Development** - - ✅ Create unified bash CLI wrapper (`scripts/aitbc-cli.sh`) - - ✅ Implement commands: submit, status, browser, blocks, receipts, cancel - - ✅ Add admin commands: admin-miners, admin-jobs, admin-stats - - ✅ Support environment variable overrides for URL and API keys - - ✅ Make script executable and document usage patterns - -- 
**Blockchain-Operations Skill Enhancement** - - ✅ Add comprehensive Ollama testing scenarios to skill - - ✅ Create detailed test documentation (`ollama-test-scenario.md`) - - ✅ Document common issues and troubleshooting procedures - - ✅ Add performance metrics and expected results - - ✅ Include end-to-end automation script template - -- **Documentation Updates** - - ✅ Update localhost testing scenario with CLI wrapper usage - - ✅ Convert examples to use localhost URLs (127.0.0.1) - - ✅ Add host user paths and quick start commands - - ✅ Document complete workflow from setup to verification - - ✅ Update skill documentation with testing scenarios - -## Stage 18 — Repository Reorganization & CSS Consolidation [COMPLETED: 2026-01-24] - -- **Root Level Cleanup** - - ✅ Move 60+ loose files from root to proper directories - - ✅ Organize deployment scripts into `scripts/deploy/` - - ✅ Organize GPU miner files into `scripts/gpu/` - - ✅ Organize test/verify files into `scripts/test/` - - ✅ Organize service management scripts into `scripts/service/` - - ✅ Move systemd services to `systemd/` - - ✅ Move nginx configs to `infra/nginx/` - - ✅ Move dashboards to `website/dashboards/` - -- **Website/Docs Folder Structure** - - ✅ Establish `/website/docs/` as source for HTML documentation - - ✅ Create shared CSS file (`css/docs.css`) with 1232 lines - - ✅ Create theme toggle JavaScript (`js/theme.js`) - - ✅ Migrate all HTML files to use external CSS (45-66% size reduction) - - ✅ Clean `/docs/` folder to only contain mkdocs markdown files - -- **Documentation Styling Fixes** - - ✅ Fix dark theme background consistency across all docs pages - - ✅ Add dark theme support to `full-documentation.html` - - ✅ Fix Quick Start section cascade styling in docs-miners.html - - ✅ Fix SDK Examples cascade indentation in docs-clients.html - - ✅ Fix malformed `` tags across all docs - - ✅ Update API endpoint example to use Python/FastAPI - -- **Path Reference Updates** - - ✅ Update systemd service 
file with new `scripts/gpu/gpu_miner_host.py` path - - ✅ Update skill documentation with new file locations - - ✅ Update localhost-testing-scenario.md with correct paths - - ✅ Update gpu_miner_host_wrapper.sh with new path - -- **Repository Maintenance** - - ✅ Expand .gitignore from 39 to 145 lines with organized sections - - ✅ Add project-specific ignore rules for coordinator, explorer, GPU miner - - ✅ Document final folder structure in done.md - -the canonical checklist during implementation. Mark completed tasks with ✅ and add dates or links to relevant PRs as development progresses. - diff --git a/protocols/receipts/spec.md b/docs/reference/specs/receipt-spec.md similarity index 100% rename from protocols/receipts/spec.md rename to docs/reference/specs/receipt-spec.md diff --git a/docs/requirements.txt b/docs/requirements.txt deleted file mode 100644 index 5a9ef2d3..00000000 --- a/docs/requirements.txt +++ /dev/null @@ -1,27 +0,0 @@ -# MkDocs Material Theme -mkdocs-material==9.4.8 -mkdocs-material-extensions==1.3.1 - -# MkDocs Core and Plugins -mkdocs==1.5.3 -mkdocs-git-revision-date-localized-plugin==1.2.6 -mkdocs-awesome-pages-plugin==2.9.2 -mkdocs-minify-plugin==0.7.4 -mkdocs-glightbox==0.3.4 -mkdocs-video==1.5.0 -mkdocs-social-plugin==1.0.0 -mkdocs-macros-plugin==1.0.5 - -# Python Extensions for Markdown -pymdown-extensions==10.8.1 -markdown-include==0.8.0 -mkdocs-mermaid2-plugin==1.1.1 - -# Additional dependencies -requests==2.31.0 -aiohttp==3.9.1 -python-dotenv==1.0.0 - -# Development dependencies -mkdocs-redirects==1.2.1 -mkdocs-monorepo-plugin==1.0.2 diff --git a/docs/roadmap.md b/docs/roadmap.md deleted file mode 120000 index 1a8a2bd6..00000000 --- a/docs/roadmap.md +++ /dev/null @@ -1 +0,0 @@ -reference/roadmap.md \ No newline at end of file diff --git a/docs/roadmap.md b/docs/roadmap.md new file mode 100644 index 00000000..3b70b25d --- /dev/null +++ b/docs/roadmap.md @@ -0,0 +1,600 @@ +# AITBC Development Roadmap + +This roadmap aggregates 
high-priority tasks derived from the bootstrap specifications in `docs/bootstrap/` and tracks progress across the monorepo. Update this document as milestones evolve. + +## Stage 1 — Upcoming Focus Areas [COMPLETED: 2025-12-22] + +- **Blockchain Node Foundations** + - ✅ Bootstrap module layout in `apps/blockchain-node/src/`. + - ✅ Implement SQLModel schemas and RPC stubs aligned with historical/attested receipts. + +- **Explorer Web Enablement** + - ✅ Finish mock integration across all pages and polish styling + mock/live toggle. + - ✅ Begin wiring coordinator endpoints (e.g., `/v1/jobs/{job_id}/receipts`). + +- **Marketplace Web Scaffolding** + - ✅ Scaffold Vite/vanilla frontends consuming coordinator receipt history endpoints and SDK examples. + +- **Pool Hub Services** + - ✅ Initialize FastAPI project, scoring registry, and telemetry ingestion hooks leveraging coordinator/miner metrics. + +- **CI Enhancements** + - ✅ Add blockchain-node tests once available and frontend build/lint checks to `.github/workflows/python-tests.yml` or follow-on workflows. + - ✅ Provide systemd unit + installer scripts under `scripts/` for streamlined deployment. + +## Stage 2 — Core Services (MVP) [COMPLETED: 2025-12-22] + +- **Coordinator API** + - ✅ Scaffold FastAPI project (`apps/coordinator-api/src/app/`). + - ✅ Implement job submission, status, result endpoints. + - ✅ Add miner registration, heartbeat, poll, result routes. + - ✅ Wire SQLite persistence for jobs, miners, receipts (historical `JobReceipt` table). + - ✅ Provide `.env.example`, `pyproject.toml`, and run scripts. + - ✅ Deploy minimal version in container with nginx proxy + +- **Miner Node** + - ✅ Implement capability probe and control loop (register → heartbeat → fetch jobs). + - ✅ Build CLI and Python runners with sandboxed work dirs (result reporting stubbed to coordinator). 
+ +- **Blockchain Node** + - ✅ Define SQLModel schema for blocks, transactions, accounts, receipts (`apps/blockchain-node/src/aitbc_chain/models.py`). + - ✅ Harden schema parity across runtime + storage: + - Alembic baseline + follow-on migrations in `apps/blockchain-node/migrations/` now track the SQLModel schema (blocks, transactions, receipts, accounts). + - Added `Relationship` + `ForeignKey` wiring in `apps/blockchain-node/src/aitbc_chain/models.py` for block ↔ transaction ↔ receipt joins. + - Introduced hex/enum validation hooks via Pydantic validators to ensure hash integrity and safe persistence. + - ✅ Implement PoA proposer loop with block assembly (`apps/blockchain-node/src/aitbc_chain/consensus/poa.py`). + - ✅ Expose REST RPC endpoints for tx submission, balances, receipts (`apps/blockchain-node/src/aitbc_chain/rpc/router.py`). + - ✅ Deliver WebSocket RPC + P2P gossip layer: + - ✅ Stand up WebSocket subscription endpoints (`apps/blockchain-node/src/aitbc_chain/rpc/websocket.py`) mirroring REST payloads. + - ✅ Implement pub/sub transport for block + transaction gossip backed by an in-memory broker (Starlette `Broadcast` or Redis) with configurable fan-out. + - ✅ Add integration tests and load-test harness ensuring gossip convergence and back-pressure handling. + - ✅ Ship devnet scripts (`apps/blockchain-node/scripts/`). + - ✅ Add observability hooks (JSON logging, Prometheus metrics) and integrate coordinator mock into devnet tooling. + - ✅ Expand observability dashboards + miner mock integration: + - Build Grafana dashboards for consensus health (block intervals, proposer rotation) and RPC latency (`apps/blockchain-node/observability/`). + - Expose miner mock telemetry (job throughput, error rates) via shared Prometheus registry and ingest into blockchain-node dashboards. + - Add alerting rules (Prometheus `Alertmanager`) for stalled proposers, queue saturation, and miner mock disconnects. 
+ - Wire coordinator mock into devnet tooling to simulate real-world load and validate observability hooks. + +- **Receipt Schema** + - ✅ Finalize canonical JSON receipt format under `protocols/receipts/` (includes sample signed receipts). + - ✅ Implement signing/verification helpers in `packages/py/aitbc-crypto` (JS SDK pending). + - ✅ Translate `docs/bootstrap/aitbc_tech_plan.md` contract skeleton into Solidity project (`packages/solidity/aitbc-token/`). + - ✅ Add deployment/test scripts and document minting flow (`packages/solidity/aitbc-token/scripts/` and `docs/run.md`). + +- **Wallet Daemon** + - ✅ Implement encrypted keystore (Argon2id + XChaCha20-Poly1305) via `KeystoreService`. + - ✅ Provide REST and JSON-RPC endpoints for wallet management and signing (`api_rest.py`, `api_jsonrpc.py`). + - ✅ Add mock ledger adapter with SQLite backend powering event history (`ledger_mock/`). + - ✅ Integrate Python receipt verification helpers (`aitbc_sdk`) and expose API/service utilities validating miner + coordinator signatures. + - ✅ Harden REST API workflows (create/list/unlock/sign) with structured password policy enforcement and deterministic pytest coverage in `apps/wallet-daemon/tests/test_wallet_api.py`. + - ✅ Implement Wallet SDK receipt ingestion + attestation surfacing: + - Added `/v1/jobs/{job_id}/receipts` client helpers with cursor pagination, retry/backoff, and summary reporting (`packages/py/aitbc-sdk/src/receipts.py`). + - Reused crypto helpers to validate miner and coordinator signatures, capturing per-key failure reasons for downstream UX. + - Surfaced aggregated attestation status (`ReceiptStatus`) and failure diagnostics for SDK + UI consumers; JS helper parity still planned. + +## Stage 3 — Pool Hub & Marketplace [COMPLETED: 2025-12-22] + +- **Pool Hub** + - ✅ Implement miner registry, scoring engine, and `/v1/match` API with Redis/PostgreSQL backing stores. 
+ - ✅ Add observability endpoints (`/v1/health`, `/v1/metrics`) plus Prometheus instrumentation and integration tests. + +- **Marketplace Web** + - ✅ Initialize Vite project with vanilla TypeScript (`apps/marketplace-web/`). + - ✅ Build offer list, bid form, and stats cards powered by mock data fixtures (`public/mock/`). + - ✅ Provide API abstraction toggling mock/live mode (`src/lib/api.ts`) and wire coordinator endpoints. + - ✅ Validate live mode against coordinator `/v1/marketplace/*` responses and add auth feature flags for rollout. + - ✅ Deploy to production at https://aitbc.bubuit.net/marketplace/ + +- **Explorer Web** + - ✅ Initialize Vite + TypeScript project scaffold (`apps/explorer-web/`). + - ✅ Add routed pages for overview, blocks, transactions, addresses, receipts. + - ✅ Seed mock datasets (`public/mock/`) and fetch helpers powering overview + blocks tables. + - ✅ Extend mock integrations to transactions, addresses, and receipts pages. + - ✅ Implement styling system, mock/live data toggle, and coordinator API wiring scaffold. + - ✅ Render overview stats from mock block/transaction/receipt summaries with graceful empty-state fallbacks. + - ✅ Validate live mode + responsive polish: + - Hit live coordinator endpoints via nginx (`/api/explorer/blocks`, `/api/explorer/transactions`, `/api/explorer/addresses`, `/api/explorer/receipts`) via `getDataMode() === "live"` and reconcile payloads with UI models. + - Add fallbacks + error surfacing for partial/failed live responses (toast + console diagnostics). + - Audit responsive breakpoints (`public/css/layout.css`) and adjust grid/typography for tablet + mobile; add regression checks in Percy/Playwright snapshots. 
+ - ✅ Deploy to production at https://aitbc.bubuit.net/explorer/ with genesis block display + +## Stage 4 — Observability & Production Polish + +- **Observability & Telemetry** + - ✅ Build Grafana dashboards for PoA consensus health (block intervals, proposer rotation cadence) leveraging `poa_last_block_interval_seconds`, `poa_proposer_rotations_total`, and per-proposer counters. + - ✅ Surface RPC latency histograms/summaries for critical endpoints (`rpc_get_head`, `rpc_send_tx`, `rpc_submit_receipt`) and add Grafana panels with SLO thresholds. + - ✅ Ingest miner mock telemetry (job throughput, failure rate) into the shared Prometheus registry and wire panels/alerts that correlate miner health with consensus metrics. + +- **Explorer Web (Live Mode)** + - ✅ Finalize live `getDataMode() === "live"` workflow: align API payload contracts, render loading/error states, and persist mock/live toggle preference. + - ✅ Expand responsive testing (tablet/mobile) and add automated visual regression snapshots prior to launch. + - ✅ Integrate Playwright smoke tests covering overview, blocks, and transactions pages in live mode. + +- **Marketplace Web (Launch Readiness)** + - ✅ Connect mock listings/bids to coordinator data sources and provide feature flags for live mode rollout. + - ✅ Implement auth/session scaffolding for marketplace actions and document API assumptions in `apps/marketplace-web/README.md`. + - ✅ Add Grafana panels monitoring marketplace API throughput and error rates once endpoints are live. + +- **Operational Hardening** + - ✅ Extend Alertmanager rules to cover RPC error spikes, proposer stalls, and miner disconnects using the new metrics. + - ✅ Document dashboard import + alert deployment steps in `docs/run.md` for operators. + - ✅ Prepare Stage 3 release checklist linking dashboards, alerts, and smoke tests prior to production cutover. + - ✅ Enable host GPU miner with coordinator proxy routing and systemd-backed coordinator service; add proxy health timer. 
+ +## Stage 5 — Scaling & Release Readiness + +- **Infrastructure Scaling** + - ✅ Benchmark blockchain node throughput under sustained load; capture CPU/memory targets and suggest horizontal scaling thresholds. + - ✅ Build Terraform/Helm templates for dev/staging/prod environments, including Prometheus/Grafana bundles. + - ✅ Implement autoscaling policies for coordinator, miners, and marketplace services with synthetic traffic tests. + +- **Reliability & Compliance** + - ✅ Formalize backup/restore procedures for PostgreSQL, Redis, and ledger storage with scheduled jobs. + - ✅ Complete security hardening review (TLS termination, API auth, secrets management) and document mitigations in `docs/security.md`. + - ✅ Add chaos testing scripts (network partition, coordinator outage) and track mean-time-to-recovery metrics. + +- **Product Launch Checklist** + - ✅ Finalize public documentation (API references, onboarding guides) and publish to the docs portal. + - ✅ Coordinate beta release timeline, including user acceptance testing of explorer/marketplace live modes. + - ✅ Establish post-launch monitoring playbooks and on-call rotations. + +## Stage 6 — Ecosystem Expansion + +- **Cross-Chain & Interop** + - ✅ Prototype cross-chain settlement hooks leveraging external bridges; document integration patterns. + - ✅ Extend SDKs (Python/JS) with pluggable transport abstractions for multi-network support. + - 🔄 Evaluate third-party explorer/analytics integrations and publish partner onboarding guides. + +- **Marketplace Growth** + - 🔄 Launch incentive programs (staking, liquidity mining) and expose telemetry dashboards tracking campaign performance. + - 🔄 Implement governance module (proposal voting, parameter changes) and add API/UX flows to explorer/marketplace. + - 🔄 Provide SLA-backed coordinator/pool hubs with capacity planning and billing instrumentation. 
+ +- **Developer Experience** + - ✅ Publish advanced tutorials (custom proposers, marketplace extensions) and maintain versioned API docs. + - 🔄 Integrate CI/CD pipelines with canary deployments and blue/green release automation. + - 🔄 Host quarterly architecture reviews capturing lessons learned and feeding into roadmap revisions. + +## Stage 7 — Innovation & Ecosystem Services + +- **GPU Service Expansion** + - ✅ Implement dynamic service registry framework for 30+ GPU-accelerated services + - ✅ Create service definitions for AI/ML (LLM inference, image/video generation, speech recognition, computer vision, recommendation systems) + - ✅ Create service definitions for Media Processing (video transcoding, streaming, 3D rendering, image/audio processing) + - ✅ Create service definitions for Scientific Computing (molecular dynamics, weather modeling, financial modeling, physics simulation, bioinformatics) + - ✅ Create service definitions for Data Analytics (big data processing, real-time analytics, graph analytics, time series analysis) + - ✅ Create service definitions for Gaming & Entertainment (cloud gaming, asset baking, physics simulation, VR/AR rendering) + - ✅ Create service definitions for Development Tools (GPU compilation, model training, data processing, simulation testing, code generation) + - ✅ Deploy service provider configuration UI with dynamic service selection + - ✅ Implement service-specific validation and hardware requirement checking + +- **Advanced Cryptography & Privacy** + - ✅ Research zk-proof-based receipt attestation and prototype a privacy-preserving settlement flow. + - ✅ Add confidential transaction support with opt-in ciphertext storage and HSM-backed key management. + - ✅ Publish threat modeling updates and share mitigations with ecosystem partners. + +- **Enterprise Integrations** + - ✅ Deliver reference connectors for ERP/payment systems and document SLA expectations. 
+ - ✅ Stand up multi-tenant coordinator infrastructure with per-tenant isolation and billing metrics. + - ✅ Launch ecosystem certification program (SDK conformance, security best practices) with public registry. + +- **Community & Governance** + - ✅ Establish open RFC process, publish governance website, and schedule regular community calls. + - ✅ Sponsor hackathons/accelerators and provide grants for marketplace extensions and analytics tooling. + - ✅ Track ecosystem KPIs (active marketplaces, cross-chain volume) and feed them into quarterly strategy reviews. + +## Stage 8 — Frontier R&D & Global Expansion [COMPLETED: 2025-12-28] + +- **Protocol Evolution** + - ✅ Launch research consortium exploring next-gen consensus (hybrid PoA/PoS) and finalize whitepapers. + - 🔄 Prototype sharding or rollup architectures to scale throughput beyond current limits. + - 🔄 Standardize interoperability specs with industry bodies and submit proposals for adoption. + +- **Global Rollout** + - 🔄 Establish regional infrastructure hubs (multi-cloud) with localized compliance and data residency guarantees. + - 🔄 Partner with regulators/enterprises to pilot regulated marketplaces and publish compliance playbooks. + - 🔄 Expand localization (UI, documentation, support) covering top target markets. + +- **Long-Term Sustainability** + - 🔄 Create sustainability fund for ecosystem maintenance, bug bounties, and community stewardship. + - 🔄 Define succession planning for core teams, including training programs and contributor pathways. + - 🔄 Publish bi-annual roadmap retrospectives assessing KPI alignment and revising long-term goals. + +## Stage 9 — Moonshot Initiatives [COMPLETED: 2025-12-28] + +- **Decentralized Infrastructure** + - 🔄 Transition coordinator/miner roles toward community-governed validator sets with incentive alignment. + - 🔄 Explore decentralized storage/backbone options (IPFS/Filecoin) for ledger and marketplace artifacts. 
+ - 🔄 Prototype fully trustless marketplace settlement leveraging zero-knowledge rollups. + +- **AI & Automation** + - 🔄 Integrate AI-driven monitoring/anomaly detection for proposer health, market liquidity, and fraud detection. + - 🔄 Automate incident response playbooks with ChatOps and policy engines. + - 🔄 Launch research into autonomous agent participation (AI agents bidding/offering in the marketplace) and governance implications. +- **Global Standards Leadership** + - 🔄 Chair industry working groups defining receipt/marketplace interoperability standards. + - 🔄 Publish annual transparency reports and sustainability metrics for stakeholders. + - 🔄 Engage with academia and open-source foundations to steward long-term protocol evolution. + +## Stage 10 — Stewardship & Legacy Planning [COMPLETED: 2025-12-28] + +- **Open Governance Maturity** + - 🔄 Transition roadmap ownership to community-elected councils with transparent voting and treasury controls. + - 🔄 Codify constitutional documents (mission, values, conflict resolution) and publish public charters. + - 🔄 Implement on-chain governance modules for protocol upgrades and ecosystem-wide decisions. + +- **Educational & Outreach Programs** + - 🔄 Fund university partnerships, research chairs, and developer fellowships focused on decentralized marketplace tech. + - 🔄 Create certification tracks and mentorship programs for new validator/operators. + - 🔄 Launch annual global summit and publish proceedings to share best practices across partners. + +- **Long-Term Preservation** + - 🔄 Archive protocol specs, governance records, and cultural artifacts in decentralized storage with redundancy. + - 🔄 Establish legal/organizational frameworks to ensure continuity across jurisdictions. + - 🔄 Develop end-of-life/transition plans for legacy components, documenting deprecation strategies and migration tooling. 
+ + +## Shared Libraries & Examples + +## Stage 11 — Trade Exchange & Token Economy [COMPLETED: 2025-12-28] + +- **Bitcoin Wallet Integration** + - ✅ Implement Bitcoin payment gateway for AITBC token purchases + - ✅ Create payment request API with unique payment addresses + - ✅ Add QR code generation for mobile payments + - ✅ Implement real-time payment monitoring with blockchain API + - ✅ Configure exchange rate: 1 BTC = 100,000 AITBC + +- **User Management System** + - ✅ Implement wallet-based authentication with session management + - ✅ Create individual user accounts with unique wallets + - ✅ Add user profile pages with transaction history + - ✅ Implement secure session tokens with 24-hour expiry + - ✅ Add login/logout functionality across all pages + +- **Trade Exchange Platform** + - ✅ Build responsive trading interface with real-time price updates + - ✅ Integrate Bitcoin payment flow with QR code display + - ✅ Add payment status monitoring and confirmation handling + - ✅ Implement AITBC token minting upon payment confirmation + - ✅ Deploy to production at https://aitbc.bubuit.net/Exchange/ + +- **API Infrastructure** + - ✅ Add user management endpoints (/api/users/*) + - ✅ Implement exchange payment endpoints (/api/exchange/*) + - ✅ Add session-based authentication for protected routes + - ✅ Create transaction history and balance tracking APIs + - ✅ Fix all import and syntax errors in coordinator API + +## Stage 13 — Explorer Live API & Reverse Proxy Fixes [COMPLETED: 2025-12-28] + +- **Explorer Live API** + - ✅ Enable coordinator explorer routes at `/v1/explorer/*`. + - ✅ Expose nginx explorer proxy at `/api/explorer/*` (maps to backend `/v1/explorer/*`). + - ✅ Fix response schema mismatches (e.g., receipts response uses `jobId`). + +- **Coordinator API Users/Login** + - ✅ Ensure `/v1/users/login` is registered and working. + - ✅ Fix missing SQLModel tables by initializing DB on startup (wallet/user tables created). 
+ +- **nginx Reverse Proxy Hardening** + - ✅ Fix `/api/v1/*` routing to avoid double `/v1` prefix. + - ✅ Add compatibility proxy for Exchange: `/api/users/*` → backend `/v1/users/*`. + +## Stage 12 — Zero-Knowledge Proof Implementation [COMPLETED: 2025-12-28] + +- **Circom Compiler Setup** + - ✅ Install Circom compiler v2.2.3 on production server + - ✅ Configure Node.js environment for ZK circuit compilation + - ✅ Install circomlib and required dependencies + +- **ZK Circuit Development** + - ✅ Create receipt attestation circuit (receipt_simple.circom) + - ✅ Implement membership proof circuit template + - ✅ Implement bid range proof circuit template + - ✅ Compile circuits to R1CS, WASM, and symbolic files + +- **Trusted Setup Ceremony** + - ✅ Perform Powers of Tau setup ceremony (2^12) + - ✅ Generate proving keys (zkey) for Groth16 + - ✅ Export verification keys for on-chain verification + - ✅ Complete phase 2 preparation with contributions + +- **ZK Applications API** + - ✅ Implement identity commitment endpoints + - ✅ Create stealth address generation service + - ✅ Add private receipt attestation API + - ✅ Implement group membership proof verification + - ✅ Add private bidding functionality + - ✅ Create computation proof verification + - ✅ Deploy to production at /api/zk/ endpoints + +- **Integration & Deployment** + - ✅ Integrate ZK proof service with coordinator API + - ✅ Configure circuit files in production environment + - ✅ Enable ZK proof generation in coordinator service + - ✅ Update documentation with ZK capabilities + +## Stage 14 — Explorer JavaScript Error Fixes [COMPLETED: 2025-12-30] + +- **JavaScript Error Resolution** + - ✅ Fixed "can't access property 'length', t is undefined" error on Explorer page load + - ✅ Updated fetchMock function in mockData.ts to return correct structure with 'items' property + - ✅ Added defensive null checks in all page init functions (overview, blocks, transactions, addresses, receipts) + - ✅ Fixed TypeScript errors for 
null checks and missing properties + - ✅ Deployed fixes to production server (/var/www/aitbc.bubuit.net/explorer/) + - ✅ Configured mock data serving from correct path (/explorer/mock/) + +## Stage 15 — Cascade Skills Framework [COMPLETED: 2026-01-19] + +- **Skills Infrastructure** + - ✅ Implement Cascade skills framework for complex workflow automation + - ✅ Create skills directory structure at `.windsurf/skills/` + - ✅ Define skill metadata format with YAML frontmatter + - ✅ Add progressive disclosure for intelligent skill invocation + +- **Deploy-Production Skill** + - ✅ Create comprehensive deployment workflow skill + - ✅ Implement pre-deployment validation script (disk, memory, services, SSL) + - ✅ Add environment template with all production variables + - ✅ Create rollback procedures with emergency steps + - ✅ Build health check script for post-deployment verification + +- **Blockchain-Operations Skill** + - ✅ Create node health monitoring with peer analysis and sync status + - ✅ Implement transaction tracer for debugging and gas optimization + - ✅ Build GPU mining optimization script for NVIDIA/AMD cards + - ✅ Add real-time sync monitor with visual progress bar + - ✅ Create network diagnostics tool with connectivity analysis + +- **Skills Integration** + - ✅ Enable automatic skill invocation based on context + - ✅ Add manual skill triggering with keyword detection + - ✅ Implement error handling and logging in all skills + - ✅ Create comprehensive documentation and usage examples + +## Stage 16 — Service Maintenance & Optimization [COMPLETED: 2026-01-21] + +- **Service Recovery** + - ✅ Diagnose and fix all failing AITBC container services + - ✅ Resolve duplicate service conflicts causing port binding errors + - ✅ Fix marketplace service implementation (missing server.py) + - ✅ Disable redundant services to prevent resource conflicts + +- **System Administration** + - ✅ Configure passwordless SSH access for automation + - ✅ Create dedicated SSH keys for secure 
service management + - ✅ Document service dependencies and port mappings + - ✅ Establish service monitoring procedures + +- **Service Status Verification** + - ✅ Verify all 7 core services running correctly + - ✅ Confirm proper nginx reverse proxy configuration + - ✅ Validate API endpoints accessibility + - ✅ Test service recovery procedures + +## Stage 17 — Ollama GPU Inference & CLI Tooling [COMPLETED: 2026-01-24] + +- **End-to-End Ollama Testing** + - ✅ Verify complete GPU inference workflow from job submission to receipt generation + - ✅ Test Ollama integration with multiple models (llama3.2, mistral, deepseek, etc.) + - ✅ Validate job lifecycle: QUEUED → RUNNING → COMPLETED + - ✅ Confirm receipt generation with accurate payment calculations + - ✅ Record transactions on blockchain with proper metadata + +- **Coordinator API Bug Fixes** + - ✅ Fix missing `_coerce_float()` helper function causing 500 errors + - ✅ Deploy fix to production incus container via SSH + - ✅ Verify result submission returns 200 OK with valid receipts + - ✅ Validate receipt payload structure and signature generation + +- **Miner Configuration & Optimization** + - ✅ Fix miner ID mismatch (host-gpu-miner → REDACTED_MINER_KEY) + - ✅ Enhance logging with explicit flush handlers for systemd journal + - ✅ Configure unbuffered Python logging environment variables + - ✅ Create systemd service unit with proper environment configuration + +- **CLI Tooling Development** + - ✅ Create unified bash CLI wrapper (`scripts/aitbc-cli.sh`) + - ✅ Implement commands: submit, status, browser, blocks, receipts, cancel + - ✅ Add admin commands: admin-miners, admin-jobs, admin-stats + - ✅ Support environment variable overrides for URL and API keys + - ✅ Make script executable and document usage patterns + +- **Blockchain-Operations Skill Enhancement** + - ✅ Add comprehensive Ollama testing scenarios to skill + - ✅ Create detailed test documentation (`ollama-test-scenario.md`) + - ✅ Document common issues and 
troubleshooting procedures + - ✅ Add performance metrics and expected results + - ✅ Include end-to-end automation script template + +- **Documentation Updates** + - ✅ Update localhost testing scenario with CLI wrapper usage + - ✅ Convert examples to use localhost URLs (127.0.0.1) + - ✅ Add host user paths and quick start commands + - ✅ Document complete workflow from setup to verification + - ✅ Update skill documentation with testing scenarios + +## Stage 18 — Repository Reorganization & CSS Consolidation [COMPLETED: 2026-01-24] + +- **Root Level Cleanup** + - ✅ Move 60+ loose files from root to proper directories + - ✅ Organize deployment scripts into `scripts/deploy/` + - ✅ Organize GPU miner files into `scripts/gpu/` + - ✅ Organize test/verify files into `scripts/test/` + - ✅ Organize service management scripts into `scripts/service/` + - ✅ Move systemd services to `systemd/` + - ✅ Move nginx configs to `infra/nginx/` + - ✅ Move dashboards to `website/dashboards/` + +- **Website/Docs Folder Structure** + - ✅ Establish `/website/docs/` as source for HTML documentation + - ✅ Create shared CSS file (`css/docs.css`) with 1232 lines + - ✅ Create theme toggle JavaScript (`js/theme.js`) + - ✅ Migrate all HTML files to use external CSS (45-66% size reduction) + - ✅ Clean `/docs/` folder to only contain mkdocs markdown files + +- **Documentation Styling Fixes** + - ✅ Fix dark theme background consistency across all docs pages + - ✅ Add dark theme support to `full-documentation.html` + - ✅ Fix Quick Start section cascade styling in docs-miners.html + - ✅ Fix SDK Examples cascade indentation in docs-clients.html + - ✅ Fix malformed `` tags across all docs + - ✅ Update API endpoint example to use Python/FastAPI + +- **Path Reference Updates** + - ✅ Update systemd service file with new `scripts/gpu/gpu_miner_host.py` path + - ✅ Update skill documentation with new file locations + - ✅ Update localhost-testing-scenario.md with correct paths + - ✅ Update 
gpu_miner_host_wrapper.sh with new path + +- **Repository Maintenance** + - ✅ Expand .gitignore from 39 to 145 lines with organized sections + - ✅ Add project-specific ignore rules for coordinator, explorer, GPU miner + - ✅ Document final folder structure in done.md + - ✅ Create `docs/files.md` file audit with whitelist/greylist/blacklist + - ✅ Remove 35 abandoned/duplicate folders and files + - ✅ Reorganize `docs/` folder - root contains only done.md, files.md, roadmap.md + - ✅ Move 25 doc files to appropriate subfolders (components, deployment, migration, etc.) + +## Stage 19 — Placeholder Content Development [PLANNED] + +Fill the intentional placeholder folders with actual content. Priority order based on user impact. + +### Phase 1: Documentation (High Priority) + +- **User Guides** (`docs/user/guides/`) + - [ ] Getting started guide for new users + - [ ] Wallet setup and management + - [ ] Job submission workflow + - [ ] Payment and receipt understanding + - [ ] Troubleshooting common issues + +- **Developer Tutorials** (`docs/developer/tutorials/`) + - [ ] Building a custom miner + - [ ] Integrating with Coordinator API + - [ ] Creating marketplace extensions + - [ ] Working with ZK proofs + - [ ] SDK usage examples (Python/JS) + +- **Reference Specs** (`docs/reference/specs/`) + - [ ] Receipt JSON schema specification + - [ ] API endpoint reference (OpenAPI) + - [ ] Protocol message formats + - [ ] Error codes and handling + +### Phase 2: Infrastructure (Medium Priority) + +- **Terraform Environments** (`infra/terraform/environments/`) + - [ ] `staging/` - Staging environment config + - [ ] `prod/` - Production environment config + - [ ] Variables and secrets management + - [ ] State backend configuration + +- **Helm Chart Values** (`infra/helm/values/`) + - [ ] `dev/` - Development values + - [ ] `staging/` - Staging values + - [ ] `prod/` - Production values + - [ ] Resource limits and scaling policies + +### Phase 3: Application Components (Lower 
Priority) + +- **Pool Hub Service** (`apps/pool-hub/src/app/`) + - [ ] `routers/` - API route handlers + - [ ] `registry/` - Miner registry implementation + - [ ] `scoring/` - Scoring engine logic + +- **Coordinator Migrations** (`apps/coordinator-api/migrations/`) + - [ ] Initial schema migration + - [ ] Index optimizations + - [ ] Data migration scripts + +### Placeholder Filling Schedule + +| Folder | Target Date | Owner | Status | +|--------|-------------|-------|--------| +| `docs/user/guides/` | Q1 2026 | Documentation | 🔄 Planned | +| `docs/developer/tutorials/` | Q1 2026 | Documentation | 🔄 Planned | +| `docs/reference/specs/` | Q1 2026 | Documentation | 🔄 Planned | +| `infra/terraform/environments/` | Q2 2026 | DevOps | 🔄 Planned | +| `infra/helm/values/` | Q2 2026 | DevOps | 🔄 Planned | +| `apps/pool-hub/src/app/` | Q2 2026 | Backend | 🔄 Planned | +| `apps/coordinator-api/migrations/` | As needed | Backend | 🔄 Planned | + +## Stage 20 — Technical Debt Remediation [PLANNED] + +Address known issues in existing components that are blocking production use. + +### Blockchain Node (`apps/blockchain-node/`) + +Current Status: Has 9 Python files but SQLModel/SQLAlchemy compatibility issues. + +- **SQLModel Compatibility** + - [ ] Audit current SQLModel schema definitions in `models.py` + - [ ] Fix relationship and foreign key wiring issues + - [ ] Resolve Alembic migration compatibility + - [ ] Add integration tests for database operations + - [ ] Document schema and migration procedures + +- **Production Readiness** + - [ ] Fix PoA consensus loop stability + - [ ] Harden RPC endpoints for production load + - [ ] Add proper error handling and logging + - [ ] Create deployment documentation + +### Solidity Token (`packages/solidity/aitbc-token/`) + +Current Status: Smart contracts exist but not deployed to mainnet. 
+ +- **Contract Audit** + - [ ] Review AIToken.sol and AITokenRegistry.sol + - [ ] Run security analysis (Slither, Mythril) + - [ ] Fix any identified vulnerabilities + - [ ] Add comprehensive test coverage + +- **Deployment Preparation** + - [ ] Configure deployment scripts for testnet + - [ ] Deploy to testnet and verify + - [ ] Document deployment process + - [ ] Plan mainnet deployment timeline + +### ZK Receipt Verifier (`contracts/ZKReceiptVerifier.sol`) + +Current Status: 240-line Groth16 verifier contract ready for deployment. + +- **Integration with ZK Circuits** + - [ ] Verify compatibility with deployed `receipt_simple` circuit + - [ ] Test proof generation and verification flow + - [ ] Configure settlement contract integration + - [ ] Add authorized verifier management + +- **Deployment** + - [ ] Deploy to testnet with ZK circuits + - [ ] Integration test with Coordinator API + - [ ] Document on-chain verification flow + +### Receipt Specification (`docs/reference/specs/receipt-spec.md`) + +Current Status: Canonical receipt schema specification moved from `protocols/receipts/`. + +- **Specification Finalization** + - [x] Core schema defined (version 1.0) + - [x] Signature format specified (Ed25519) + - [x] Validation rules documented + - [ ] Add multi-signature receipt format + - [ ] Document ZK-proof metadata extension + - [ ] Add Merkle proof anchoring spec + +### Technical Debt Schedule + +| Component | Priority | Target | Status | +|-----------|----------|--------|--------| +| `apps/blockchain-node/` SQLModel fixes | Medium | Q2 2026 | 🔄 Planned | +| `packages/solidity/aitbc-token/` audit | Low | Q3 2026 | 🔄 Planned | +| `packages/solidity/aitbc-token/` testnet | Low | Q3 2026 | 🔄 Planned | +| `contracts/ZKReceiptVerifier.sol` deploy | Low | Q3 2026 | 🔄 Planned | +| `docs/reference/specs/receipt-spec.md` finalize | Low | Q2 2026 | 🔄 Planned | + +the canonical checklist during implementation. 
Mark completed tasks with ✅ and add dates or links to relevant PRs as development progresses. + diff --git a/docs/user-guide/creating-jobs.md b/docs/user-guide/creating-jobs.md deleted file mode 100644 index b9f66ce3..00000000 --- a/docs/user-guide/creating-jobs.md +++ /dev/null @@ -1,49 +0,0 @@ ---- -title: Creating Jobs -description: Learn how to create and submit AI jobs ---- - -# Creating Jobs - -Jobs are the primary way to execute AI workloads on the AITBC platform. - -## Job Types - -- **AI Inference**: Run pre-trained models -- **Model Training**: Train new models -- **Data Processing**: Process datasets -- **Custom**: Custom computations - -## Job Specification - -A job specification includes: -- Model configuration -- Input/output formats -- Resource requirements -- Pricing constraints - -## Example - -```yaml -name: "image-classification" -type: "ai-inference" -model: - type: "python" - entrypoint: "model.py" -``` - -## Submitting Jobs - -Use the CLI or API to submit jobs: - -```bash -aitbc job submit job.yaml -``` - -## Monitoring - -Track job progress through: -- CLI commands -- Web interface -- API endpoints -- WebSocket streams diff --git a/docs/user-guide/explorer.md b/docs/user-guide/explorer.md deleted file mode 100644 index 516ebc77..00000000 --- a/docs/user-guide/explorer.md +++ /dev/null @@ -1,49 +0,0 @@ ---- -title: Explorer -description: Using the AITBC blockchain explorer ---- - -# Explorer - -The AITBC explorer allows you to browse and search the blockchain for transactions, jobs, and other activities. 
- -## Features - -### Transaction Search -- Search by transaction hash -- Filter by address -- View transaction details - -### Job Tracking -- Monitor job status -- View job history -- Analyze performance - -### Analytics -- Network statistics -- Volume metrics -- Activity charts - -## Using the Explorer - -### Web Interface -Visit [https://aitbc.bubuit.net/explorer/](https://aitbc.bubuit.net/explorer/) - -### API Access -```bash -# Get transaction -curl https://aitbc.bubuit.net/api/v1/transactions/{tx_hash} - -# Get job details -curl https://aitbc.bubuit.net/api/v1/jobs/{job_id} - -# Explorer data (blocks) -curl https://aitbc.bubuit.net/api/explorer/blocks -``` - -## Advanced Features - -- Real-time updates -- Custom dashboards -- Data export -- Alert notifications diff --git a/docs/user-guide/marketplace.md b/docs/user-guide/marketplace.md deleted file mode 100644 index 8496f87e..00000000 --- a/docs/user-guide/marketplace.md +++ /dev/null @@ -1,46 +0,0 @@ ---- -title: Marketplace -description: Using the AITBC marketplace ---- - -# Marketplace - -The AITBC marketplace connects job creators with miners who can execute their AI workloads. - -## How It Works - -1. **Job Creation**: Users create jobs with specific requirements -2. **Offer Matching**: The marketplace finds suitable miners -3. **Execution**: Miners execute the jobs and submit results -4. 
**Payment**: Automatic payment upon successful completion - -## Finding Services - -Browse available services: -- By job type -- By price range -- By miner reputation -- By resource requirements - -## Pricing - -Dynamic pricing based on: -- Market demand -- Resource availability -- Miner reputation -- Job complexity - -## Creating Offers - -As a miner, you can: -- Set your prices -- Specify job types -- Define resource limits -- Build reputation - -## Safety Features - -- Escrow payments -- Dispute resolution -- Reputation system -- Cryptographic proofs diff --git a/docs/user-guide/overview.md b/docs/user-guide/overview.md deleted file mode 100644 index 61be3785..00000000 --- a/docs/user-guide/overview.md +++ /dev/null @@ -1,27 +0,0 @@ ---- -title: User Guide Overview -description: Learn how to use AITBC as a user ---- - -# User Guide Overview - -Welcome to the AITBC user guide! This section will help you understand how to interact with the AITBC platform. - -## What You'll Learn - -- Creating and submitting AI jobs -- Using the marketplace -- Managing your wallet -- Monitoring your jobs -- Understanding receipts and proofs - -## Getting Started - -If you're new to AITBC, start with the [Quickstart Guide](../getting-started/quickstart.md). - -## Navigation - -- [Creating Jobs](creating-jobs.md) - Learn to submit AI workloads -- [Marketplace](marketplace.md) - Buy and sell AI services -- [Explorer](explorer.md) - Browse the blockchain -- [Wallet Management](wallet-management.md) - Manage your funds diff --git a/docs/user-guide/wallet-management.md b/docs/user-guide/wallet-management.md deleted file mode 100644 index dcf0f4d8..00000000 --- a/docs/user-guide/wallet-management.md +++ /dev/null @@ -1,65 +0,0 @@ ---- -title: Wallet Management -description: Managing your AITBC wallet ---- - -# Wallet Management - -Your AITBC wallet allows you to store, send, and receive AITBC tokens and interact with the platform. 
- -## Creating a Wallet - -### New Wallet -```bash -aitbc wallet create -``` - -### Import Existing -```bash -aitbc wallet import -``` - -## Wallet Operations - -### Check Balance -```bash -aitbc wallet balance -``` - -### Send Tokens -```bash -aitbc wallet send
-``` - -### Transaction History -```bash -aitbc wallet history -``` - -## Security - -- Never share your private key -- Use a hardware wallet for large amounts -- Enable two-factor authentication -- Keep backups in secure locations - -## Staking - -Earn rewards by staking your tokens: -```bash -aitbc wallet stake -``` - -## Backup - -Always backup your wallet: -```bash -aitbc wallet backup --output wallet.backup -``` - -## Recovery - -Restore from backup: -```bash -aitbc wallet restore --input wallet.backup -``` diff --git a/docs/BITCOIN-WALLET-SETUP.md b/docs/user/guides/BITCOIN-WALLET-SETUP.md similarity index 100% rename from docs/BITCOIN-WALLET-SETUP.md rename to docs/user/guides/BITCOIN-WALLET-SETUP.md diff --git a/docs/LOCAL_ASSETS_SUMMARY.md b/docs/user/guides/LOCAL_ASSETS_SUMMARY.md similarity index 100% rename from docs/LOCAL_ASSETS_SUMMARY.md rename to docs/user/guides/LOCAL_ASSETS_SUMMARY.md diff --git a/docs/USER-INTERFACE-GUIDE.md b/docs/user/guides/USER-INTERFACE-GUIDE.md similarity index 100% rename from docs/USER-INTERFACE-GUIDE.md rename to docs/user/guides/USER-INTERFACE-GUIDE.md diff --git a/docs/USER-MANAGEMENT-SETUP.md b/docs/user/guides/USER-MANAGEMENT-SETUP.md similarity index 100% rename from docs/USER-MANAGEMENT-SETUP.md rename to docs/user/guides/USER-MANAGEMENT-SETUP.md diff --git a/ecosystem-analytics/analytics_service.py b/ecosystem-analytics/analytics_service.py deleted file mode 100644 index 0f9f58c3..00000000 --- a/ecosystem-analytics/analytics_service.py +++ /dev/null @@ -1,628 +0,0 @@ -""" -Ecosystem Analytics Service for AITBC - -Tracks and analyzes ecosystem metrics including: -- Hackathon participation and outcomes -- Grant program effectiveness -- Extension adoption and usage -- Developer engagement -- Network effects and cross-chain activity -""" - -import asyncio -import json -from datetime import datetime, timedelta -from typing import Dict, List, Any, Optional -from dataclasses import dataclass, asdict -from decimal import 
Decimal -import pandas as pd -import plotly.graph_objects as go -import plotly.express as px -from sqlalchemy import create_engine, select, func -from sqlalchemy.orm import sessionmaker - -# Configuration - in production, this would come from environment variables or config file -class Settings: - DATABASE_URL = "postgresql://user:pass@localhost/aitbc" - -settings = Settings() - - -@dataclass -class EcosystemMetric: - """Base class for ecosystem metrics""" - timestamp: datetime - metric_name: str - value: float - unit: str - dimensions: Dict[str, Any] - source: str - - -@dataclass -class HackathonMetric: - """Hackathon-specific metrics""" - event_id: str - event_name: str - start_date: datetime - end_date: datetime - participants: int - submissions: int - winners: int - projects_deployed: int - github_stars: int - community_engagement: float - technical_score: float - innovation_score: float - - -@dataclass -class GrantMetric: - """Grant program metrics""" - grant_id: str - project_name: str - amount_awarded: Decimal - amount_disbursed: Decimal - milestones_completed: int - total_milestones: int - users_acquired: int - github_contributors: int - code_commits: int - documentation_score: float - community_score: float - - -@dataclass -class ExtensionMetric: - """Extension/connector metrics""" - extension_id: str - extension_name: str - downloads: int - active_installations: int - api_calls: int - error_rate: float - avg_response_time: float - user_satisfaction: float - integration_count: int - revenue_generated: Decimal - - -class EcosystemAnalyticsService: - """Main analytics service for ecosystem metrics""" - - def __init__(self): - self.engine = create_engine(settings.DATABASE_URL) - self.Session = sessionmaker(bind=self.engine) - self.logger = __import__('logging').getLogger(__name__) - - async def collect_hackathon_metrics(self, event_id: str) -> HackathonMetric: - """Collect metrics for a specific hackathon""" - with self.Session() as db: - # Get event details 
- event = self._get_hackathon_event(db, event_id) - if not event: - raise ValueError(f"Hackathon {event_id} not found") - - # Collect participant metrics - participants = await self._count_participants(event_id) - submissions = await self._count_submissions(event_id) - - # Get project metrics - projects = await self._get_hackathon_projects(event_id) - projects_deployed = sum(1 for p in projects if p.get('deployed', False)) - - # Calculate engagement scores - community_engagement = await self._calculate_community_engagement(event_id) - technical_scores = [p.get('technical_score', 0) for p in projects] - innovation_scores = [p.get('innovation_score', 0) for p in projects] - - # Get GitHub metrics - github_stars = sum(p.get('github_stars', 0) for p in projects) - - metric = HackathonMetric( - event_id=event_id, - event_name=event['name'], - start_date=event['start_date'], - end_date=event['end_date'], - participants=participants, - submissions=submissions, - winners=len([p for p in projects if p.get('winner', False)]), - projects_deployed=projects_deployed, - github_stars=github_stars, - community_engagement=community_engagement, - technical_score=sum(technical_scores) / len(technical_scores) if technical_scores else 0, - innovation_score=sum(innovation_scores) / len(innovation_scores) if innovation_scores else 0 - ) - - # Store metrics - await self._store_metric(metric) - - return metric - - async def collect_grant_metrics(self, grant_id: str) -> GrantMetric: - """Collect metrics for a specific grant""" - with self.Session() as db: - # Get grant details - grant = self._get_grant_details(db, grant_id) - if not grant: - raise ValueError(f"Grant {grant_id} not found") - - # Get project metrics - project = await self._get_grant_project(grant_id) - - # Calculate completion metrics - milestones_completed = await self._count_completed_milestones(grant_id) - total_milestones = grant.get('total_milestones', 1) - - # Get adoption metrics - users_acquired = await 
self._count_project_users(grant_id) - github_contributors = await self._count_github_contributors(project.get('repo_url')) - code_commits = await self._count_code_commits(project.get('repo_url')) - - # Calculate quality scores - documentation_score = await self._evaluate_documentation(project.get('docs_url')) - community_score = await self._evaluate_community_health(project.get('repo_url')) - - metric = GrantMetric( - grant_id=grant_id, - project_name=grant['project_name'], - amount_awarded=Decimal(str(grant.get('amount_awarded', 0))), - amount_disbursed=Decimal(str(grant.get('amount_disbursed', 0))), - milestones_completed=milestones_completed, - total_milestones=total_milestones, - users_acquired=users_acquired, - github_contributors=github_contributors, - code_commits=code_commits, - documentation_score=documentation_score, - community_score=community_score - ) - - # Store metrics - await self._store_metric(metric) - - return metric - - async def collect_extension_metrics(self, extension_id: str) -> ExtensionMetric: - """Collect metrics for a specific extension""" - with self.Session() as db: - # Get extension details - extension = self._get_extension_details(db, extension_id) - if not extension: - raise ValueError(f"Extension {extension_id} not found") - - # Get usage metrics - downloads = await self._count_downloads(extension_id) - active_installations = await self._count_active_installations(extension_id) - - # Get performance metrics - api_calls = await self._count_api_calls(extension_id, days=30) - error_rate = await self._calculate_error_rate(extension_id, days=30) - avg_response_time = await self._calculate_avg_response_time(extension_id, days=30) - - # Get quality metrics - user_satisfaction = await self._calculate_user_satisfaction(extension_id) - integration_count = await self._count_integrations(extension_id) - - # Get business metrics - revenue_generated = await self._calculate_revenue(extension_id, days=30) - - metric = ExtensionMetric( - 
extension_id=extension_id, - extension_name=extension['name'], - downloads=downloads, - active_installations=active_installations, - api_calls=api_calls, - error_rate=error_rate, - avg_response_time=avg_response_time, - user_satisfaction=user_satisfaction, - integration_count=integration_count, - revenue_generated=Decimal(str(revenue_generated)) - ) - - # Store metrics - await self._store_metric(metric) - - return metric - - async def generate_ecosystem_dashboard(self, days: int = 30) -> Dict[str, Any]: - """Generate comprehensive ecosystem dashboard""" - end_date = datetime.utcnow() - start_date = end_date - timedelta(days=days) - - dashboard = { - "period": { - "start": start_date.isoformat(), - "end": end_date.isoformat(), - "days": days - }, - "summary": await self._generate_summary_metrics(start_date, end_date), - "hackathons": await self._generate_hackathon_section(start_date, end_date), - "grants": await self._generate_grant_section(start_date, end_date), - "extensions": await self._generate_extension_section(start_date, end_date), - "network_effects": await self._generate_network_effects(start_date, end_date) - } - - return dashboard - - async def generate_hackathon_report(self, event_id: str) -> Dict[str, Any]: - """Generate detailed hackathon report""" - metric = await self.collect_hackathon_metrics(event_id) - - # Generate visualizations - figures = {} - - # Participation funnel - fig_funnel = go.Figure(go.Funnel( - y=["Registrations", "Active Participants", "Submissions", "Deployed Projects", "Winners"], - x=[ - metric.participants * 1.5, # Estimated registrations - metric.participants, - metric.submissions, - metric.projects_deployed, - metric.winners - ] - )) - fig_funnel.update_layout(title="Hackathon Participation Funnel") - figures['funnel'] = fig_funnel.to_json() - - # Score distribution - fig_scores = go.Figure() - fig_scores.add_trace(go.Scatter( - x=list(range(metric.submissions)), - y=[{'technical_score': 75, 'innovation_score': 85}] * 
metric.submissions, # Sample data - mode='markers', - name='Projects' - )) - fig_scores.update_layout(title="Project Scores Distribution") - figures['scores'] = fig_scores.to_json() - - # Project categories - categories = ['DeFi', 'Enterprise', 'Developer Tools', 'Analytics', 'Other'] - counts = [15, 20, 10, 8, 12] # Sample data - - fig_categories = px.pie( - values=counts, - names=categories, - title="Project Categories" - ) - figures['categories'] = fig_categories.to_json() - - report = { - "event": asdict(metric), - "figures": figures, - "insights": await self._generate_hackathon_insights(metric), - "recommendations": await self._generate_hackathon_recommendations(metric) - } - - return report - - async def generate_grant_impact_report(self, grant_id: str) -> Dict[str, Any]: - """Generate grant impact report""" - metric = await self.collect_grant_metrics(grant_id) - - # Generate ROI analysis - roi_analysis = await self._calculate_grant_roi(metric) - - # Generate adoption curve - adoption_data = await self._get_adoption_curve(grant_id) - - fig_adoption = px.line( - x=[d['date'] for d in adoption_data], - y=[d['users'] for d in adoption_data], - title="User Adoption Over Time" - ) - - report = { - "grant": asdict(metric), - "roi_analysis": roi_analysis, - "adoption_chart": fig_adoption.to_json(), - "milestone_progress": { - "completed": metric.milestones_completed, - "total": metric.total_milestones, - "percentage": (metric.milestones_completed / metric.total_milestones * 100) if metric.total_milestones > 0 else 0 - }, - "quality_metrics": { - "documentation": metric.documentation_score, - "community": metric.community_score, - "overall": (metric.documentation_score + metric.community_score) / 2 - } - } - - return report - - async def export_metrics(self, metric_type: str, format: str = "csv") -> bytes: - """Export metrics in specified format""" - # Get metrics data - if metric_type == "hackathons": - data = await self._get_all_hackathon_metrics() - elif 
metric_type == "grants": - data = await self._get_all_grant_metrics() - elif metric_type == "extensions": - data = await self._get_all_extension_metrics() - else: - raise ValueError(f"Unknown metric type: {metric_type}") - - # Convert to DataFrame - df = pd.DataFrame([asdict(m) for m in data]) - - # Export in requested format - if format == "csv": - return df.to_csv(index=False).encode('utf-8') - elif format == "json": - return df.to_json(orient='records', indent=2).encode('utf-8') - elif format == "excel": - return df.to_excel(index=False).encode('utf-8') - else: - raise ValueError(f"Unsupported format: {format}") - - # Private helper methods - - async def _store_metric(self, metric: Any): - """Store metric in database""" - # Implementation would store in metrics table - pass - - async def _count_participants(self, event_id: str) -> int: - """Count hackathon participants""" - # Implementation would query participant data - return 150 # Sample - - async def _count_submissions(self, event_id: str) -> int: - """Count hackathon submissions""" - return 45 # Sample - - async def _get_hackathon_projects(self, event_id: str) -> List[Dict]: - """Get all projects from hackathon""" - # Implementation would query project data - return [] # Sample - - async def _calculate_community_engagement(self, event_id: str) -> float: - """Calculate community engagement score""" - return 85.5 # Sample - - async def _count_completed_milestones(self, grant_id: str) -> int: - """Count completed grant milestones""" - return 3 # Sample - - async def _count_project_users(self, grant_id: str) -> int: - """Count users of grant project""" - return 500 # Sample - - async def _count_github_contributors(self, repo_url: str) -> int: - """Count GitHub contributors""" - return 12 # Sample - - async def _count_code_commits(self, repo_url: str) -> int: - """Count code commits""" - return 234 # Sample - - async def _evaluate_documentation(self, docs_url: str) -> float: - """Evaluate documentation 
quality""" - return 90.0 # Sample - - async def _evaluate_community_health(self, repo_url: str) -> float: - """Evaluate community health""" - return 75.5 # Sample - - async def _count_downloads(self, extension_id: str) -> int: - """Count extension downloads""" - return 1250 # Sample - - async def _count_active_installations(self, extension_id: str) -> int: - """Count active installations""" - return 350 # Sample - - async def _count_api_calls(self, extension_id: str, days: int) -> int: - """Count API calls to extension""" - return 15000 # Sample - - async def _calculate_error_rate(self, extension_id: str, days: int) -> float: - """Calculate error rate""" - return 0.02 # Sample - - async def _calculate_avg_response_time(self, extension_id: str, days: int) -> float: - """Calculate average response time""" - return 125.5 # Sample - - async def _calculate_user_satisfaction(self, extension_id: str) -> float: - """Calculate user satisfaction score""" - return 4.5 # Sample - - async def _count_integrations(self, extension_id: str) -> int: - """Count integrations using extension""" - return 25 # Sample - - async def _calculate_revenue(self, extension_id: str, days: int) -> float: - """Calculate revenue generated""" - return 5000.0 # Sample - - async def _generate_summary_metrics(self, start_date: datetime, end_date: datetime) -> Dict: - """Generate summary metrics for dashboard""" - return { - "total_hackathons": 4, - "total_participants": 600, - "total_grants_awarded": 12, - "total_grant_amount": 500000, - "active_extensions": 25, - "total_downloads": 50000, - "github_stars": 2500, - "community_members": 1500 - } - - async def _generate_hackathon_section(self, start_date: datetime, end_date: datetime) -> Dict: - """Generate hackathon section of dashboard""" - return { - "upcoming": [], - "recent": [], - "top_projects": [], - "participation_trend": [] - } - - async def _generate_grant_section(self, start_date: datetime, end_date: datetime) -> Dict: - """Generate grant 
section of dashboard""" - return { - "active_grants": 8, - "completed_grants": 4, - "total_disbursed": 350000, - "roi_average": 2.5, - "success_rate": 0.85 - } - - async def _generate_extension_section(self, start_date: datetime, end_date: datetime) -> Dict: - """Generate extension section of dashboard""" - return { - "total_extensions": 25, - "new_extensions": 3, - "most_popular": [], - "growth_rate": 0.15 - } - - async def _generate_network_effects(self, start_date: datetime, end_date: datetime) -> Dict: - """Generate network effects analysis""" - return { - "cross_chain_volume": 1000000, - "interoperability_score": 85.5, - "network_value": 25000000, - "metcalfe_coefficient": 1.2 - } - - async def _generate_hackathon_insights(self, metric: HackathonMetric) -> List[str]: - """Generate insights from hackathon metrics""" - insights = [] - - if metric.projects_deployed / metric.submissions > 0.5: - insights.append("High deployment rate indicates strong technical execution") - - if metric.community_engagement > 80: - insights.append("Excellent community engagement and participation") - - if metric.github_stars > 100: - insights.append("Strong GitHub community interest") - - return insights - - async def _generate_hackathon_recommendations(self, metric: HackathonMetric) -> List[str]: - """Generate recommendations for improvement""" - recommendations = [] - - if metric.projects_deployed / metric.submissions < 0.3: - recommendations.append("Provide more deployment support and infrastructure") - - if metric.technical_score < 70: - recommendations.append("Offer technical workshops and mentorship") - - if metric.innovation_score < 70: - recommendations.append("Encourage more innovative and ambitious projects") - - return recommendations - - async def _calculate_grant_roi(self, metric: GrantMetric) -> Dict: - """Calculate grant ROI""" - if metric.amount_disbursed == 0: - return {"roi": 0, "payback_period": None} - - # Simplified ROI calculation - estimated_value = 
metric.users_acquired * 100 # $100 per user - roi = (estimated_value - float(metric.amount_disbursed)) / float(metric.amount_disbursed) - - return { - "roi": roi, - "payback_period": "12 months" if roi > 0 else None, - "value_created": estimated_value - } - - async def _get_adoption_curve(self, grant_id: str) -> List[Dict]: - """Get user adoption over time""" - # Sample data - return [ - {"date": "2024-01-01", "users": 50}, - {"date": "2024-02-01", "users": 120}, - {"date": "2024-03-01", "users": 200}, - {"date": "2024-04-01", "users": 350}, - {"date": "2024-05-01", "users": 500} - ] - - def _get_hackathon_event(self, db, event_id: str) -> Optional[Dict]: - """Get hackathon event details""" - # Implementation would query database - return { - "name": "DeFi Innovation Hackathon", - "start_date": datetime(2024, 1, 15), - "end_date": datetime(2024, 1, 22) - } - - def _get_grant_details(self, db, grant_id: str) -> Optional[Dict]: - """Get grant details""" - # Implementation would query database - return { - "project_name": "Advanced Analytics Platform", - "amount_awarded": 50000, - "amount_disbursed": 25000, - "total_milestones": 4 - } - - def _get_extension_details(self, db, extension_id: str) -> Optional[Dict]: - """Get extension details""" - # Implementation would query database - return { - "name": "SAP ERP Connector" - } - - async def _get_grant_project(self, grant_id: str) -> Dict: - """Get grant project details""" - return { - "repo_url": "https://github.com/example/project", - "docs_url": "https://docs.example.com" - } - - async def _get_all_hackathon_metrics(self) -> List[HackathonMetric]: - """Get all hackathon metrics""" - # Implementation would query database - return [] - - async def _get_all_grant_metrics(self) -> List[GrantMetric]: - """Get all grant metrics""" - # Implementation would query database - return [] - - async def _get_all_extension_metrics(self) -> List[ExtensionMetric]: - """Get all extension metrics""" - # Implementation would query 
database - return [] - - -# CLI interface for analytics service -async def main(): - """CLI entry point""" - import argparse - - parser = argparse.ArgumentParser(description="AITBC Ecosystem Analytics") - parser.add_argument("--dashboard", action="store_true", help="Generate ecosystem dashboard") - parser.add_argument("--hackathon", help="Generate hackathon report for event ID") - parser.add_argument("--grant", help="Generate grant impact report for grant ID") - parser.add_argument("--export", choices=["hackathons", "grants", "extensions"], help="Export metrics") - parser.add_argument("--format", choices=["csv", "json", "excel"], default="json", help="Export format") - parser.add_argument("--days", type=int, default=30, help="Number of days for dashboard") - - args = parser.parse_args() - - service = EcosystemAnalyticsService() - - if args.dashboard: - dashboard = await service.generate_ecosystem_dashboard(args.days) - print(json.dumps(dashboard, indent=2, default=str)) - elif args.hackathon: - report = await service.generate_hackathon_report(args.hackathon) - print(json.dumps(report, indent=2, default=str)) - elif args.grant: - report = await service.generate_grant_impact_report(args.grant) - print(json.dumps(report, indent=2, default=str)) - elif args.export: - data = await service.export_metrics(args.export, args.format) - print(data.decode()) - else: - parser.print_help() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/ecosystem-analytics/kpi_tracker.py b/ecosystem-analytics/kpi_tracker.py deleted file mode 100644 index 02a32ce9..00000000 --- a/ecosystem-analytics/kpi_tracker.py +++ /dev/null @@ -1,927 +0,0 @@ -""" -Ecosystem KPI Tracker for AITBC -Tracks key performance indicators for ecosystem health and strategy reviews -""" - -import asyncio -import json -from datetime import datetime, timedelta -from typing import Dict, List, Any, Optional -from dataclasses import dataclass, asdict -from decimal import Decimal -import pandas as pd 
-import plotly.graph_objects as go -import plotly.express as px -from sqlalchemy import create_engine, select, func, and_, or_ -from sqlalchemy.orm import sessionmaker -from enum import Enum - -from ..config import settings -from ..database import get_db - - -class KPICategory(Enum): - """Categories of KPIs""" - MARKETPLACE = "marketplace" - CROSS_CHAIN = "cross_chain" - DEVELOPER = "developer" - USER = "user" - FINANCIAL = "financial" - TECHNICAL = "technical" - - -@dataclass -class KPIDefinition: - """Definition of a KPI""" - name: str - category: KPICategory - description: str - unit: str - target: Optional[float] - calculation_method: str - data_sources: List[str] - frequency: str # daily, weekly, monthly - importance: str # high, medium, low - - -@dataclass -class KPIValue: - """A single KPI measurement""" - timestamp: datetime - kpi_name: str - value: float - unit: str - category: str - metadata: Dict[str, Any] - - -class EcosystemKPITracker: - """Main KPI tracking system""" - - def __init__(self): - self.engine = create_engine(settings.DATABASE_URL) - self.Session = sessionmaker(bind=self.engine) - self.logger = __import__('logging').getLogger(__name__) - - # Define all tracked KPIs - self.kpi_definitions = self._initialize_kpi_definitions() - - def _initialize_kpi_definitions(self) -> Dict[str, KPIDefinition]: - """Initialize all KPI definitions""" - return { - # Marketplace KPIs - "active_marketplaces": KPIDefinition( - name="active_marketplaces", - category=KPICategory.MARKETPLACE, - description="Number of active marketplaces on the platform", - unit="count", - target=50.0, - calculation_method="count_active_marketplaces", - data_sources=["marketplace_service", "tenant_db"], - frequency="daily", - importance="high" - ), - "total_volume_usd": KPIDefinition( - name="total_volume_usd", - category=KPICategory.MARKETPLACE, - description="Total transaction volume in USD", - unit="USD", - target=10000000.0, - calculation_method="sum_transaction_volume", - 
data_sources=["transaction_db", "price_oracle"], - frequency="daily", - importance="high" - ), - "marketplace_utilization": KPIDefinition( - name="marketplace_utilization", - category=KPICategory.MARKETPLACE, - description="Percentage of utilized marketplace capacity", - unit="percent", - target=75.0, - calculation_method="calculate_utilization", - data_sources=["marketplace_service", "usage_metrics"], - frequency="hourly", - importance="medium" - ), - - # Cross-Chain KPIs - "cross_chain_volume": KPIDefinition( - name="cross_chain_volume", - category=KPICategory.CROSS_CHAIN, - description="Total cross-chain transaction volume", - unit="USD", - target=5000000.0, - calculation_method="sum_cross_chain_volume", - data_sources=["bridge_service", "transaction_db"], - frequency="daily", - importance="high" - ), - "active_bridges": KPIDefinition( - name="active_bridges", - category=KPICategory.CROSS_CHAIN, - description="Number of active cross-chain bridges", - unit="count", - target=10.0, - calculation_method="count_active_bridges", - data_sources=["bridge_service"], - frequency="daily", - importance="medium" - ), - "bridge_success_rate": KPIDefinition( - name="bridge_success_rate", - category=KPICategory.CROSS_CHAIN, - description="Success rate of cross-chain transactions", - unit="percent", - target=95.0, - calculation_method="calculate_bridge_success_rate", - data_sources=["bridge_service", "transaction_db"], - frequency="hourly", - importance="high" - ), - - # Developer KPIs - "active_developers": KPIDefinition( - name="active_developers", - category=KPICategory.DEVELOPER, - description="Number of active developers in ecosystem", - unit="count", - target=1000.0, - calculation_method="count_active_developers", - data_sources=["github_api", "developer_db"], - frequency="weekly", - importance="high" - ), - "new_extensions": KPIDefinition( - name="new_extensions", - category=KPICategory.DEVELOPER, - description="Number of new marketplace extensions created", - 
unit="count", - target=25.0, - calculation_method="count_new_extensions", - data_sources=["extension_registry", "github_api"], - frequency="weekly", - importance="medium" - ), - "developer_satisfaction": KPIDefinition( - name="developer_satisfaction", - category=KPICategory.DEVELOPER, - description="Developer satisfaction score (1-5)", - unit="score", - target=4.5, - calculation_method="calculate_satisfaction_score", - data_sources=["surveys", "github_issues", "discord_sentiment"], - frequency="monthly", - importance="medium" - ), - - # User KPIs - "active_users": KPIDefinition( - name="active_users", - category=KPICategory.USER, - description="Number of active users (30-day)", - unit="count", - target=10000.0, - calculation_method="count_active_users", - data_sources=["user_db", "auth_service"], - frequency="daily", - importance="high" - ), - "user_retention": KPIDefinition( - name="user_retention", - category=KPICategory.USER, - description="30-day user retention rate", - unit="percent", - target=80.0, - calculation_method="calculate_retention_rate", - data_sources=["user_db", "analytics_service"], - frequency="weekly", - importance="high" - ), - "net_promoter_score": KPIDefinition( - name="net_promoter_score", - category=KPICategory.USER, - description="Net Promoter Score", - unit="score", - target=50.0, - calculation_method="calculate_nps", - data_sources=["surveys", "feedback_service"], - frequency="monthly", - importance="medium" - ), - - # Financial KPIs - "revenue": KPIDefinition( - name="revenue", - category=KPICategory.FINANCIAL, - description="Total platform revenue", - unit="USD", - target=1000000.0, - calculation_method="calculate_revenue", - data_sources=["billing_service", "payment_processor"], - frequency="monthly", - importance="high" - ), - "cost_per_transaction": KPIDefinition( - name="cost_per_transaction", - category=KPICategory.FINANCIAL, - description="Average cost per transaction", - unit="USD", - target=0.10, - 
calculation_method="calculate_cost_per_tx", - data_sources=["billing_service", "metrics_service"], - frequency="monthly", - importance="medium" - ), - "profit_margin": KPIDefinition( - name="profit_margin", - category=KPICategory.FINANCIAL, - description="Platform profit margin", - unit="percent", - target=20.0, - calculation_method="calculate_profit_margin", - data_sources=["billing_service", "financial_db"], - frequency="quarterly", - importance="high" - ), - - # Technical KPIs - "network_hash_rate": KPIDefinition( - name="network_hash_rate", - category=KPICategory.TECHNICAL, - description="Network hash rate", - unit="H/s", - target=1000000000.0, - calculation_method="get_hash_rate", - data_sources=["blockchain_node", "metrics_service"], - frequency="hourly", - importance="high" - ), - "block_time": KPIDefinition( - name="block_time", - category=KPICategory.TECHNICAL, - description="Average block time", - unit="seconds", - target=12.0, - calculation_method="calculate_average_block_time", - data_sources=["blockchain_node", "block_db"], - frequency="hourly", - importance="high" - ), - "uptime": KPIDefinition( - name="uptime", - category=KPICategory.TECHNICAL, - description="Platform uptime percentage", - unit="percent", - target=99.9, - calculation_method="calculate_uptime", - data_sources=["monitoring_service", "health_checks"], - frequency="daily", - importance="high" - ), - } - - async def collect_all_kpis(self, period: str = "daily") -> List[KPIValue]: - """Collect all KPIs for a given period""" - kpi_values = [] - - for kpi_name, kpi_def in self.kpi_definitions.items(): - if kpi_def.frequency == period or period == "all": - try: - value = await self._calculate_kpi(kpi_name, kpi_def) - kpi_value = KPIValue( - timestamp=datetime.utcnow(), - kpi_name=kpi_name, - value=value, - unit=kpi_def.unit, - category=kpi_def.category.value, - metadata={ - "target": kpi_def.target, - "importance": kpi_def.importance, - } - ) - kpi_values.append(kpi_value) - except Exception 
as e: - self.logger.error(f"Failed to calculate KPI {kpi_name}: {e}") - - # Store KPIs - await self._store_kpis(kpi_values) - - return kpi_values - - async def _calculate_kpi(self, kpi_name: str, kpi_def: KPIDefinition) -> float: - """Calculate a specific KPI""" - method_name = kpi_def.calculation_method - method = getattr(self, method_name, None) - - if method is None: - raise ValueError(f"Unknown calculation method: {method_name}") - - return await method() - - async def _store_kpis(self, kpi_values: List[KPIValue]): - """Store KPI values in database""" - with self.Session() as db: - for kpi in kpi_values: - # Implementation would store in KPI table - pass - - # KPI Calculation Methods - - async def count_active_marketplaces(self) -> float: - """Count active marketplaces""" - with self.Session() as db: - # Query active tenants with marketplace enabled - count = db.execute( - select(func.count(Tenant.id)) - .where( - and_( - Tenant.status == "active", - Tenant.features.contains(["marketplace"]) - ) - ) - ).scalar() - return float(count) - - async def sum_transaction_volume(self) -> float: - """Sum total transaction volume in USD""" - with self.Session() as db: - # Get transactions in last 24 hours - total = db.execute( - select(func.sum(Transaction.amount_usd)) - .where( - Transaction.timestamp >= datetime.utcnow() - timedelta(days=1) - ) - ).scalar() - return float(total or 0) - - async def calculate_utilization(self) -> float: - """Calculate marketplace utilization percentage""" - # Get total capacity and used capacity - total_capacity = await self._get_total_capacity() - used_capacity = await self._get_used_capacity() - - if total_capacity == 0: - return 0.0 - - return (used_capacity / total_capacity) * 100 - - async def sum_cross_chain_volume(self) -> float: - """Sum cross-chain transaction volume""" - with self.Session() as db: - total = db.execute( - select(func.sum(CrossChainTransaction.amount_usd)) - .where( - CrossChainTransaction.timestamp >= 
datetime.utcnow() - timedelta(days=1) - ) - ).scalar() - return float(total or 0) - - async def count_active_bridges(self) -> float: - """Count active cross-chain bridges""" - # Query bridge service - bridges = await self._query_bridge_service("/bridges?status=active") - return float(len(bridges)) - - async def calculate_bridge_success_rate(self) -> float: - """Calculate bridge transaction success rate""" - with self.Session() as db: - total = db.execute( - select(func.count(CrossChainTransaction.id)) - .where( - CrossChainTransaction.timestamp >= datetime.utcnow() - timedelta(hours=24) - ) - ).scalar() - - successful = db.execute( - select(func.count(CrossChainTransaction.id)) - .where( - and_( - CrossChainTransaction.timestamp >= datetime.utcnow() - timedelta(hours=24), - CrossChainTransaction.status == "completed" - ) - ) - ).scalar() - - if total == 0: - return 100.0 - - return (successful / total) * 100 - - async def count_active_developers(self) -> float: - """Count active developers (last 30 days)""" - # Query GitHub API and local records - github_contributors = await self._query_github_api("/contributors") - local_developers = await self._count_local_developers() - - # Combine and deduplicate - all_developers = set(github_contributors + local_developers) - return float(len(all_developers)) - - async def count_new_extensions(self) -> float: - """Count new extensions this week""" - with self.Session() as db: - count = db.execute( - select(func.count(Extension.id)) - .where( - Extension.created_at >= datetime.utcnow() - timedelta(weeks=1) - ) - ).scalar() - return float(count) - - async def calculate_satisfaction_score(self) -> float: - """Calculate developer satisfaction score""" - # Aggregate from multiple sources - survey_scores = await self._get_survey_scores() - issue_sentiment = await self._analyze_issue_sentiment() - discord_sentiment = await self._analyze_discord_sentiment() - - # Weighted average - weights = {"survey": 0.5, "issues": 0.25, "discord": 
0.25} - - score = ( - survey_scores * weights["survey"] + - issue_sentiment * weights["issues"] + - discord_sentiment * weights["discord"] - ) - - return score - - async def count_active_users(self) -> float: - """Count active users (last 30 days)""" - with self.Session() as db: - count = db.execute( - select(func.count(User.id)) - .where( - User.last_active >= datetime.utcnow() - timedelta(days=30) - ) - ).scalar() - return float(count) - - async def calculate_retention_rate(self) -> float: - """Calculate 30-day user retention rate""" - # Cohort analysis - cohort_users = await self._get_cohort_users(30) # Users from 30 days ago - retained_users = await self._count_retained_users(cohort_users) - - if not cohort_users: - return 0.0 - - return (retained_users / len(cohort_users)) * 100 - - async def calculate_nps(self) -> float: - """Calculate Net Promoter Score""" - responses = await self._get_nps_responses() - - if not responses: - return 0.0 - - promoters = sum(1 for r in responses if r >= 9) - detractors = sum(1 for r in responses if r <= 6) - - nps = ((promoters - detractors) / len(responses)) * 100 - return nps - - async def calculate_revenue(self) -> float: - """Calculate total platform revenue""" - with self.Session() as db: - total = db.execute( - select(func.sum(Revenue.amount)) - .where( - Revenue.period == "monthly" - ) - ).scalar() - return float(total or 0) - - async def calculate_cost_per_tx(self) -> float: - """Calculate cost per transaction""" - total_cost = await self._get_monthly_costs() - tx_count = await self._get_monthly_tx_count() - - if tx_count == 0: - return 0.0 - - return total_cost / tx_count - - async def calculate_profit_margin(self) -> float: - """Calculate profit margin percentage""" - revenue = await self.calculate_revenue() - costs = await self._get_monthly_costs() - - if revenue == 0: - return 0.0 - - profit = revenue - costs - return (profit / revenue) * 100 - - async def get_hash_rate(self) -> float: - """Get current network hash 
rate""" - # Query blockchain node metrics - metrics = await self._query_blockchain_metrics() - return float(metrics.get("hash_rate", 0)) - - async def calculate_average_block_time(self) -> float: - """Calculate average block time""" - with self.Session() as db: - avg_time = db.execute( - select(func.avg(Block.timestamp_diff)) - .where( - Block.timestamp >= datetime.utcnow() - timedelta(hours=1) - ) - ).scalar() - return float(avg_time or 0) - - async def calculate_uptime(self) -> float: - """Calculate platform uptime percentage""" - # Get uptime from monitoring service - uptime_data = await self._query_monitoring_service("/uptime") - return float(uptime_data.get("uptime_percentage", 0)) - - # Helper methods for data collection - - async def _get_total_capacity(self) -> float: - """Get total marketplace capacity""" - # Implementation would query marketplace service - return 10000.0 # Sample - - async def _get_used_capacity(self) -> float: - """Get used marketplace capacity""" - # Implementation would query usage metrics - return 7500.0 # Sample - - async def _query_bridge_service(self, endpoint: str) -> List[Dict]: - """Query bridge service API""" - # Implementation would make HTTP request - return [] # Sample - - async def _query_github_api(self, endpoint: str) -> List[str]: - """Query GitHub API""" - # Implementation would use GitHub API - return [] # Sample - - async def _count_local_developers(self) -> List[str]: - """Count local developers""" - with self.Session() as db: - developers = db.execute( - select(Developer.github_username) - .where( - Developer.last_active >= datetime.utcnow() - timedelta(days=30) - ) - ).all() - return [d[0] for d in developers] - - async def _get_survey_scores(self) -> float: - """Get survey satisfaction scores""" - # Implementation would query survey service - return 4.2 # Sample - - async def _analyze_issue_sentiment(self) -> float: - """Analyze GitHub issue sentiment""" - # Implementation would use sentiment analysis - return 3.8 
# Sample - - async def _analyze_discord_sentiment(self) -> float: - """Analyze Discord message sentiment""" - # Implementation would use sentiment analysis - return 4.0 # Sample - - async def _get_cohort_users(self, days_ago: int) -> List[str]: - """Get users from a specific cohort""" - with self.Session() as db: - cohort_date = datetime.utcnow() - timedelta(days=days_ago) - users = db.execute( - select(User.id) - .where( - and_( - User.created_at >= cohort_date, - User.created_at < cohort_date + timedelta(days=1) - ) - ) - ).all() - return [u[0] for u in users] - - async def _count_retained_users(self, user_ids: List[str]) -> int: - """Count how many users are still active""" - with self.Session() as db: - count = db.execute( - select(func.count(User.id)) - .where( - and_( - User.id.in_(user_ids), - User.last_active >= datetime.utcnow() - timedelta(days=30) - ) - ) - ).scalar() - return count - - async def _get_nps_responses(self) -> List[int]: - """Get NPS survey responses""" - # Implementation would query survey service - return [9, 10, 8, 7, 9, 10, 6, 9] # Sample - - async def _get_monthly_costs(self) -> float: - """Get monthly operational costs""" - # Implementation would query financial service - return 800000.0 # Sample - - async def _get_monthly_tx_count(self) -> int: - """Get monthly transaction count""" - with self.Session() as db: - count = db.execute( - select(func.count(Transaction.id)) - .where( - Transaction.timestamp >= datetime.utcnow() - timedelta(days=30) - ) - ).scalar() - return count - - async def _query_blockchain_metrics(self) -> Dict[str, float]: - """Query blockchain node metrics""" - # Implementation would query blockchain node - return {"hash_rate": 1000000000.0} # Sample - - async def _query_monitoring_service(self, endpoint: str) -> Dict[str, float]: - """Query monitoring service""" - # Implementation would query monitoring service - return {"uptime_percentage": 99.95} # Sample - - async def generate_kpi_dashboard(self, period: str = 
"monthly") -> Dict[str, Any]: - """Generate comprehensive KPI dashboard""" - # Collect all KPIs - kpis = await self.collect_all_kpis("all") - - # Group by category - by_category = {} - for kpi in kpis: - if kpi.category not in by_category: - by_category[kpi.category] = [] - by_category[kpi.category].append(kpi) - - # Calculate health scores - health_scores = await self._calculate_health_scores(by_category) - - # Generate insights - insights = await self._generate_insights(kpis) - - # Create visualizations - charts = await self._create_charts(kpis) - - return { - "timestamp": datetime.utcnow().isoformat(), - "period": period, - "kpis": [asdict(kpi) for kpi in kpis], - "by_category": { - cat: [asdict(kpi) for kpi in kpis] - for cat, kpis in by_category.items() - }, - "health_scores": health_scores, - "insights": insights, - "charts": charts, - } - - async def _calculate_health_scores(self, by_category: Dict[str, List[KPIValue]]) -> Dict[str, float]: - """Calculate health scores for each category""" - scores = {} - - for category, kpis in by_category.items(): - if not kpis: - scores[category] = 0.0 - continue - - # Weight by importance - total_score = 0.0 - total_weight = 0.0 - - for kpi in kpis: - target = kpi.metadata.get("target", 0) - if target == 0: - continue - - # Calculate score as percentage of target - score = min((kpi.value / target) * 100, 100) - - # Apply importance weight - weight = {"high": 3, "medium": 2, "low": 1}.get( - kpi.metadata.get("importance", "medium"), 2 - ) - - total_score += score * weight - total_weight += weight - - if total_weight > 0: - scores[category] = total_score / total_weight - else: - scores[category] = 0.0 - - return scores - - async def _generate_insights(self, kpis: List[KPIValue]) -> List[str]: - """Generate insights from KPI data""" - insights = [] - - # Analyze trends - for kpi in kpis: - if kpi.value < (kpi.metadata.get("target", 0) * 0.8): - insights.append( - f"⚠️ {kpi.kpi_name} is below target ({kpi.value:.2f} vs 
{kpi.metadata.get('target')})" - ) - elif kpi.value > (kpi.metadata.get("target", 0) * 1.2): - insights.append( - f"🎉 {kpi.kpi_name} exceeds target ({kpi.value:.2f} vs {kpi.metadata.get('target')})" - ) - - # Cross-category insights - marketplace_kpis = [k for k in kpis if k.category == "marketplace"] - if marketplace_kpis: - volume_kpi = next((k for k in marketplace_kpis if k.kpi_name == "total_volume_usd"), None) - utilization_kpi = next((k for k in marketplace_kpis if k.kpi_name == "marketplace_utilization"), None) - - if volume_kpi and utilization_kpi: - if volume_kpi.value > 1000000 and utilization_kpi.value < 50: - insights.append( - "💡 High volume but low utilization - consider increasing capacity" - ) - - return insights[:10] # Limit to top 10 insights - - async def _create_charts(self, kpis: List[KPIValue]) -> Dict[str, str]: - """Create chart visualizations""" - charts = {} - - # KPI gauge charts - for kpi in kpis[:5]: # Limit to top 5 - fig = go.Figure(go.Indicator( - mode = "gauge+number+delta", - value = kpi.value, - domain = {'x': [0, 1], 'y': [0, 1]}, - title = {'text': kpi.kpi_name}, - delta = {'reference': kpi.metadata.get('target', 0)}, - gauge = { - 'axis': {'range': [None, kpi.metadata.get('target', 100) * 1.5]}, - 'bar': {'color': "darkblue"}, - 'steps': [ - {'range': [0, kpi.metadata.get('target', 100) * 0.5], 'color': "lightgray"}, - {'range': [kpi.metadata.get('target', 100) * 0.5, kpi.metadata.get('target', 100)], 'color': "gray"} - ], - 'threshold': { - 'line': {'color': "red", 'width': 4}, - 'thickness': 0.75, - 'value': kpi.metadata.get('target', 100) * 0.9 - } - } - )) - - charts[f"gauge_{kpi.kpi_name}"] = fig.to_json() - - # Category comparison chart - categories = {} - for kpi in kpis: - if kpi.category not in categories: - categories[kpi.category] = [] - categories[kpi.category].append(kpi.value / (kpi.metadata.get('target', 1) * 100)) - - fig = px.bar( - x=list(categories.keys()), - y=[sum(v)/len(v) for v in categories.values()], - 
title="KPI Performance by Category", - labels={"x": "Category", "y": "Average % of Target"} - ) - charts["category_comparison"] = fig.to_json() - - return charts - - async def export_kpis(self, format: str = "csv", period: str = "monthly") -> bytes: - """Export KPI data""" - kpis = await self.collect_all_kpis(period) - - # Convert to DataFrame - df = pd.DataFrame([asdict(kpi) for kpi in kpis]) - - if format == "csv": - return df.to_csv(index=False).encode('utf-8') - elif format == "json": - return df.to_json(orient='records', indent=2).encode('utf-8') - elif format == "excel": - return df.to_excel(index=False).encode('utf-8') - else: - raise ValueError(f"Unsupported format: {format}") - - async def generate_strategy_review(self, quarter: str) -> Dict[str, Any]: - """Generate quarterly strategy review document""" - # Get KPI data for the quarter - kpis = await self.collect_all_kpis("all") - - # Compare with previous quarter - previous_kpis = await self._get_previous_quarter_kpis(quarter) - - # Generate analysis - analysis = { - "quarter": quarter, - "executive_summary": await self._generate_executive_summary(kpis, previous_kpis), - "key_achievements": await self._identify_achievements(kpis), - "challenges": await self._identify_challenges(kpis), - "recommendations": await self._generate_recommendations(kpis, previous_kpis), - "next_quarter_goals": await self._set_next_quarter_goals(kpis), - } - - return analysis - - async def _get_previous_quarter_kpis(self, quarter: str) -> List[KPIValue]: - """Get KPIs from previous quarter""" - # Implementation would query historical KPI data - return [] # Sample - - async def _generate_executive_summary(self, kpis: List[KPIValue], previous: List[KPIValue]) -> str: - """Generate executive summary""" - # Implementation would analyze KPI trends - return "Ecosystem shows strong growth with 25% increase in active users and 40% growth in transaction volume." 
- - async def _identify_achievements(self, kpis: List[KPIValue]) -> List[str]: - """Identify key achievements""" - achievements = [] - - for kpi in kpis: - if kpi.value >= kpi.metadata.get("target", 0): - achievements.append( - f"Exceeded {kpi.kpi_name} target with {kpi.value:.2f} (target: {kpi.metadata.get('target')})" - ) - - return achievements - - async def _identify_challenges(self, kpis: List[KPIValue]) -> List[str]: - """Identify challenges and areas for improvement""" - challenges = [] - - for kpi in kpis: - if kpi.value < (kpi.metadata.get("target", 0) * 0.7): - challenges.append( - f"{kpi.kpi_name} below target at {kpi.value:.2f} (target: {kpi.metadata.get('target')})" - ) - - return challenges - - async def _generate_recommendations(self, kpis: List[KPIValue], previous: List[KPIValue]) -> List[str]: - """Generate strategic recommendations""" - recommendations = [] - - # Analyze trends and generate recommendations - recommendations.extend([ - "Focus on improving developer onboarding to increase extension creation", - "Invest in cross-chain infrastructure to support growing volume", - "Enhance user retention programs to improve 30-day retention rate", - ]) - - return recommendations - - async def _set_next_quarter_goals(self, kpis: List[KPIValue]) -> Dict[str, float]: - """Set goals for next quarter""" - goals = {} - - for kpi in kpis: - # Set goals 10-20% higher than current performance - current_target = kpi.metadata.get("target", kpi.value) - next_target = current_target * 1.15 - goals[kpi.kpi_name] = next_target - - return goals - - -# CLI interface -async def main(): - """CLI entry point""" - import argparse - - parser = argparse.ArgumentParser(description="AITBC Ecosystem KPI Tracker") - parser.add_argument("--collect", action="store_true", help="Collect all KPIs") - parser.add_argument("--dashboard", action="store_true", help="Generate KPI dashboard") - parser.add_argument("--export", choices=["csv", "json", "excel"], help="Export KPIs") - 
parser.add_argument("--period", default="daily", help="Period for KPI collection") - parser.add_argument("--strategy-review", help="Generate strategy review for quarter") - - args = parser.parse_args() - - tracker = EcosystemKPITracker() - - if args.collect: - kpis = await tracker.collect_all_kpis(args.period) - print(f"Collected {len(kpis)} KPIs") - for kpi in kpis: - print(f"{kpi.kpi_name}: {kpi.value:.2f} {kpi.unit}") - - elif args.dashboard: - dashboard = await tracker.generate_kpi_dashboard() - print(json.dumps(dashboard, indent=2, default=str)) - - elif args.export: - data = await tracker.export_kpis(args.export, args.period) - print(data.decode()) - - elif args.strategy_review: - review = await tracker.generate_strategy_review(args.strategy_review) - print(json.dumps(review, indent=2, default=str)) - - else: - parser.print_help() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/ecosystem-certification/registry/api-specification.yaml b/ecosystem-certification/registry/api-specification.yaml deleted file mode 100644 index e1cec59a..00000000 --- a/ecosystem-certification/registry/api-specification.yaml +++ /dev/null @@ -1,635 +0,0 @@ -openapi: 3.0.3 -info: - title: AITBC Ecosystem Registry API - description: Public registry API for certified AITBC partners, SDKs, and integrations - version: 1.0.0 - contact: - name: AITBC Ecosystem Team - email: ecosystem@aitbc.io - license: - name: MIT - url: https://opensource.org/licenses/MIT - -servers: - - url: https://registry.aitbc.io/api/v1 - description: Production server - - url: https://staging-registry.aitbc.io/api/v1 - description: Staging server - -paths: - /partners: - get: - summary: List certified partners - description: Retrieve a paginated list of all certified partners - tags: - - Partners - parameters: - - name: level - in: query - schema: - type: string - enum: [bronze, silver, gold] - description: Filter by certification level - - name: language - in: query - schema: - type: string - 
description: Filter by SDK language - - name: category - in: query - schema: - type: string - enum: [payment, erp, analytics, infrastructure] - description: Filter by partner category - - name: status - in: query - schema: - type: string - enum: [active, suspended, expired] - description: Filter by certification status - - name: page - in: query - schema: - type: integer - default: 1 - description: Page number - - name: limit - in: query - schema: - type: integer - default: 20 - maximum: 100 - description: Items per page - responses: - '200': - description: Successful response - content: - application/json: - schema: - type: object - properties: - partners: - type: array - items: - $ref: '#/components/schemas/PartnerSummary' - pagination: - $ref: '#/components/schemas/Pagination' - filters: - type: object - description: Applied filters - - /partners/{partnerId}: - get: - summary: Get partner details - description: Retrieve detailed information about a certified partner - tags: - - Partners - parameters: - - name: partnerId - in: path - required: true - schema: - type: string - description: Unique partner identifier - responses: - '200': - description: Successful response - content: - application/json: - schema: - $ref: '#/components/schemas/PartnerDetail' - '404': - $ref: '#/components/responses/NotFound' - - /partners/{partnerId}/certification: - get: - summary: Get certification details - description: Retrieve certification information for a partner - tags: - - Certification - parameters: - - name: partnerId - in: path - required: true - schema: - type: string - responses: - '200': - description: Successful response - content: - application/json: - schema: - $ref: '#/components/schemas/Certification' - '404': - $ref: '#/components/responses/NotFound' - - /partners/{partnerId}/verify: - get: - summary: Verify certification - description: Verify if a partner's certification is valid - tags: - - Certification - parameters: - - name: partnerId - in: path - required: 
true - schema: - type: string - responses: - '200': - description: Verification result - content: - application/json: - schema: - type: object - properties: - valid: - type: boolean - level: - type: string - enum: [bronze, silver, gold] - expires_at: - type: string - format: date-time - verification_id: - type: string - - /sdks: - get: - summary: List certified SDKs - description: Retrieve a list of all certified SDKs - tags: - - SDKs - parameters: - - name: language - in: query - schema: - type: string - enum: [python, java, javascript, typescript, go, rust] - description: Filter by programming language - - name: version - in: query - schema: - type: string - description: Filter by SDK version - - name: level - in: query - schema: - type: string - enum: [bronze, silver, gold] - description: Filter by certification level - responses: - '200': - description: Successful response - content: - application/json: - schema: - type: object - properties: - sdks: - type: array - items: - $ref: '#/components/schemas/SDKSummary' - - /sdks/{sdkId}: - get: - summary: Get SDK details - description: Retrieve detailed information about a certified SDK - tags: - - SDKs - parameters: - - name: sdkId - in: path - required: true - schema: - type: string - responses: - '200': - description: Successful response - content: - application/json: - schema: - $ref: '#/components/schemas/SDKDetail' - - /search: - get: - summary: Search registry - description: Search for partners, SDKs, and integrations - tags: - - Search - parameters: - - name: q - in: query - required: true - schema: - type: string - description: Search query - - name: type - in: query - schema: - type: string - enum: [partner, sdk, integration, all] - default: all - description: Search target type - - name: level - in: query - schema: - type: string - enum: [bronze, silver, gold] - description: Filter by certification level - responses: - '200': - description: Search results - content: - application/json: - schema: - type: 
object - properties: - results: - type: array - items: - $ref: '#/components/schemas/SearchResult' - total: - type: integer - query: - type: string - - /stats: - get: - summary: Registry statistics - description: Get overall registry statistics - tags: - - Statistics - responses: - '200': - description: Statistics - content: - application/json: - schema: - $ref: '#/components/schemas/RegistryStats' - - /badges/{partnerId}/{level}.svg: - get: - summary: Get certification badge - description: Retrieve SVG badge for certified partner - tags: - - Badges - parameters: - - name: partnerId - in: path - required: true - schema: - type: string - - name: level - in: path - required: true - schema: - type: string - enum: [bronze, silver, gold] - responses: - '200': - description: SVG badge - content: - image/svg+xml: - schema: - type: string - -components: - schemas: - PartnerSummary: - type: object - properties: - id: - type: string - description: Unique partner identifier - name: - type: string - description: Partner company name - logo_url: - type: string - description: URL to partner logo - description: - type: string - description: Brief partner description - website: - type: string - format: uri - description: Partner website URL - certification_level: - type: string - enum: [bronze, silver, gold] - description: Current certification level - category: - type: string - enum: [payment, erp, analytics, infrastructure] - description: Partner category - languages: - type: array - items: - type: string - description: Supported programming languages - certified_at: - type: string - format: date-time - description: Certification date - expires_at: - type: string - format: date-time - description: Certification expiration date - - PartnerDetail: - allOf: - - $ref: '#/components/schemas/PartnerSummary' - - type: object - properties: - contact_email: - type: string - format: email - description: Contact email - support_url: - type: string - format: uri - description: Support 
documentation URL - documentation_url: - type: string - format: uri - description: API documentation URL - github_url: - type: string - format: uri - description: GitHub repository URL - integration_count: - type: integer - description: Number of certified integrations - test_results: - type: object - properties: - api_compliance: - type: object - properties: - score: - type: number - minimum: 0 - maximum: 100 - tests_run: - type: integer - tests_passed: - type: integer - security: - type: object - properties: - score: - type: number - minimum: 0 - maximum: 100 - vulnerabilities_found: - type: integer - critical_issues: - type: integer - performance: - type: object - properties: - avg_response_time: - type: number - throughput: - type: number - uptime: - type: number - - Certification: - type: object - properties: - id: - type: string - description: Certification ID - partner_id: - type: string - description: Partner ID - level: - type: string - enum: [bronze, silver, gold] - description: Certification level - status: - type: string - enum: [active, suspended, expired] - description: Certification status - issued_at: - type: string - format: date-time - description: Issue date - expires_at: - type: string - format: date-time - description: Expiration date - test_results: - type: object - description: Test suite results - security_report: - type: object - description: Security validation report - criteria_met: - type: array - items: - type: string - description: List of certification criteria met - - SDKSummary: - type: object - properties: - id: - type: string - description: SDK identifier - name: - type: string - description: SDK name - language: - type: string - description: Programming language - version: - type: string - description: Latest version - partner_id: - type: string - description: Partner ID - partner_name: - type: string - description: Partner name - certification_level: - type: string - enum: [bronze, silver, gold] - download_url: - type: string - 
format: uri - description: Download URL - documentation_url: - type: string - format: uri - description: Documentation URL - certified_at: - type: string - format: date-time - - SDKDetail: - allOf: - - $ref: '#/components/schemas/SDKSummary' - - type: object - properties: - description: - type: string - description: SDK description - repository_url: - type: string - format: uri - description: Source repository URL - package_name: - type: string - description: Package name (pip, npm, maven) - dependencies: - type: array - items: - type: string - description: Key dependencies - supported_versions: - type: array - items: - type: string - description: Supported AITBC API versions - installation_command: - type: string - description: Installation command - quick_start: - type: string - description: Quick start code snippet - - SearchResult: - type: object - properties: - type: - type: string - enum: [partner, sdk, integration] - description: Result type - id: - type: string - description: Item ID - name: - type: string - description: Item name - description: - type: string - description: Item description - certification_level: - type: string - enum: [bronze, silver, gold] - url: - type: string - format: uri - description: Item URL - relevance_score: - type: number - description: Search relevance score - - Pagination: - type: object - properties: - page: - type: integer - description: Current page - limit: - type: integer - description: Items per page - total: - type: integer - description: Total items - pages: - type: integer - description: Total pages - has_next: - type: boolean - description: Has next page - has_prev: - type: boolean - description: Has previous page - - RegistryStats: - type: object - properties: - total_partners: - type: integer - description: Total certified partners - total_sdks: - type: integer - description: Total certified SDKs - certification_breakdown: - type: object - properties: - bronze: - type: integer - silver: - type: integer - gold: - 
type: integer - language_breakdown: - type: object - additionalProperties: - type: integer - description: Number of SDKs per language - category_breakdown: - type: object - additionalProperties: - type: integer - description: Number of partners per category - last_updated: - type: string - format: date-time - description: Last update timestamp - - responses: - NotFound: - description: Resource not found - content: - application/json: - schema: - type: object - properties: - error: - type: string - message: - type: string - - BadRequest: - description: Bad request - content: - application/json: - schema: - type: object - properties: - error: - type: string - message: - type: string - details: - type: object - - securitySchemes: - ApiKeyAuth: - type: apiKey - in: header - name: X-API-Key - description: API key for authenticated endpoints - -security: - - ApiKeyAuth: [] - -tags: - - name: Partners - description: Partner management and lookup - - name: SDKs - description: SDK information and downloads - - name: Certification - description: Certification verification and details - - name: Search - description: Registry search functionality - - name: Statistics - description: Registry statistics and metrics - - name: Badges - description: Certification badges diff --git a/ecosystem-certification/test-suite/README.md b/ecosystem-certification/test-suite/README.md deleted file mode 100644 index c2d0acff..00000000 --- a/ecosystem-certification/test-suite/README.md +++ /dev/null @@ -1,55 +0,0 @@ -# AITBC SDK Conformance Test Suite - -Language-agnostic test suite for validating AITBC SDK implementations against the official API specification. 
- -## Architecture - -The test suite uses black-box HTTP API testing to validate SDK compliance: -- **Mock AITBC Server**: Validates requests against OpenAPI spec -- **Test Runners**: Docker containers for each language -- **Test Fixtures**: JSON/YAML test cases -- **Reporting**: Detailed compliance reports - -## Quick Start - -```bash -# Run Bronze certification tests -docker-compose run python-sdk bronze - -# Run Silver certification tests -docker-compose run python-sdk silver - -# Run all tests -docker-compose run python-sdk all -``` - -## Test Structure - -``` -test-suite/ -├── fixtures/ # Test cases (JSON/YAML) -├── runners/ # Language-specific test runners -├── mock-server/ # OpenAPI mock server -├── reports/ # Test results -└── docker-compose.yml -``` - -## Certification Levels - -### Bronze Tests -- API compliance -- Authentication -- Error handling -- Data model validation - -### Silver Tests -- Performance benchmarks -- Rate limiting -- Retry logic -- Async support - -### Gold Tests -- Enterprise features -- Scalability -- Security compliance -- SLA validation diff --git a/ecosystem-certification/test-suite/certify-stripe.py b/ecosystem-certification/test-suite/certify-stripe.py deleted file mode 100644 index 349ec6e7..00000000 --- a/ecosystem-certification/test-suite/certify-stripe.py +++ /dev/null @@ -1,175 +0,0 @@ -#!/usr/bin/env python3 -""" -Certify the AITBC Stripe connector as a validation of the certification system -""" - -import asyncio -import json -import sys -from pathlib import Path - -# Add test suite to path -sys.path.insert(0, str(Path(__file__).parent)) - -from runners.python.test_runner import ConformanceTestRunner -from security.security_validator import SecurityValidator - - -async def certify_stripe_connector(): - """Run full certification on Stripe connector""" - - print("=" * 60) - print("AITBC Stripe Connector Certification") - print("=" * 60) - - # Configuration - base_url = "http://localhost:8011" # Mock server - api_key = 
"test-api-key" - sdk_path = Path(__file__).parent.parent.parent / "enterprise-connectors" / "python-sdk" - - # 1. Run conformance tests - print("\n1. Running SDK Conformance Tests...") - runner = ConformanceTestRunner(base_url, api_key) - - # Run Bronze tests - bronze_suite = Path(__file__).parent / "fixtures" / "bronze" / "api-compliance.json" - bronze_result = await runner.run_suite(str(bronze_suite), "bronze") - - # Check if Bronze passed - if bronze_result.compliance_score < 95: - print(f"\n❌ Bronze certification FAILED: {bronze_result.compliance_score:.1f}%") - return False - - print(f"\n✅ Bronze certification PASSED: {bronze_result.compliance_score:.1f}%") - - # 2. Run security validation - print("\n2. Running Security Validation...") - validator = SecurityValidator() - security_report = validator.validate(str(sdk_path), "bronze") - - print(f"\nSecurity Score: {security_report.score}/100") - print(f"Issues Found: {len(security_report.issues)}") - - if security_report.blocked: - print("\n❌ Security validation BLOCKED certification") - for issue in security_report.issues: - if issue.severity in ["critical", "high"]: - print(f" - {issue.description} ({issue.severity})") - return False - - print("\n✅ Security validation PASSED") - - # 3. Generate certification report - print("\n3. 
Generating Certification Report...") - - certification = { - "partner": { - "name": "AITBC", - "id": "aitbc-official", - "website": "https://aitbc.io", - "description": "Official AITBC Python SDK with Stripe connector" - }, - "sdk": { - "name": "aitbc-enterprise-python", - "version": "1.0.0", - "language": "python", - "repository": "https://github.com/aitbc/enterprise-connectors" - }, - "certification": { - "level": "bronze", - "issued_at": "2024-01-15T00:00:00Z", - "expires_at": "2025-01-15T00:00:00Z", - "id": "CERT-STRIPE-001" - }, - "test_results": { - "api_compliance": { - "score": bronze_result.compliance_score, - "tests_run": bronze_result.total_tests, - "tests_passed": bronze_result.passed_tests - }, - "security": { - "score": security_report.score, - "vulnerabilities_found": len(security_report.issues), - "critical_issues": sum(1 for i in security_report.issues if i.severity == "critical") - } - }, - "criteria_met": [ - "Core API compatibility", - "Authentication support", - "Error handling standards", - "Data model compliance", - "Async support", - "Basic security practices", - "Documentation completeness" - ] - } - - # Save report - report_path = Path(__file__).parent / "reports" / "stripe-certification.json" - report_path.parent.mkdir(exist_ok=True) - - with open(report_path, 'w') as f: - json.dump(certification, f, indent=2) - - print(f"\n✅ Certification report saved to: {report_path}") - - # 4. Generate badge - print("\n4. Generating Certification Badge...") - - badge_svg = f''' - - - - - - - - - - - - - - AITBC - AITBC - Bronze - Bronze - - ''' - - badge_path = Path(__file__).parent / "reports" / "stripe-bronze.svg" - with open(badge_path, 'w') as f: - f.write(badge_svg) - - print(f"✅ Badge saved to: {badge_path}") - - # 5. 
Summary - print("\n" + "=" * 60) - print("CERTIFICATION COMPLETE") - print("=" * 60) - print(f"Partner: AITBC") - print(f"SDK: aitbc-enterprise-python (Stripe connector)") - print(f"Level: Bronze") - print(f"API Compliance: {bronze_result.compliance_score:.1f}%") - print(f"Security Score: {security_report.score}/100") - print(f"Certification ID: CERT-STRIPE-001") - print(f"Valid Until: 2025-01-15") - - return True - - -async def main(): - """Main entry point""" - success = await certify_stripe_connector() - - if success: - print("\n🎉 Stripe connector successfully certified!") - print("\nThe certification system is validated and ready for external partners.") - sys.exit(0) - else: - print("\n❌ Certification failed. Please fix issues before proceeding.") - sys.exit(1) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/ecosystem-certification/test-suite/fixtures/bronze/api-compliance.json b/ecosystem-certification/test-suite/fixtures/bronze/api-compliance.json deleted file mode 100644 index 5fe53eec..00000000 --- a/ecosystem-certification/test-suite/fixtures/bronze/api-compliance.json +++ /dev/null @@ -1,264 +0,0 @@ -{ - "name": "API Compliance Tests", - "level": "bronze", - "description": "Tests for core API compliance", - "tests": [ - { - "id": "BR-001", - "name": "Health Check Endpoint", - "description": "Validate health check endpoint returns proper response", - "request": { - "method": "GET", - "path": "/health", - "headers": { - "Accept": "application/json" - } - }, - "expected": { - "status": 200, - "headers": { - "Content-Type": "application/json" - }, - "body": { - "status": "healthy", - "timestamp": "string", - "version": "string" - } - } - }, - { - "id": "BR-002", - "name": "Authentication - Bearer Token", - "description": "Validate bearer token authentication", - "request": { - "method": "GET", - "path": "/api/v1/user/profile", - "headers": { - "Authorization": "Bearer valid-token", - "Accept": "application/json" - } - }, - "expected": { - 
"status": 200, - "headers": { - "Content-Type": "application/json" - }, - "body": { - "id": "string", - "email": "string", - "created_at": "string" - } - } - }, - { - "id": "BR-003", - "name": "Authentication - Invalid Token", - "description": "Validate proper error for invalid token", - "request": { - "method": "GET", - "path": "/api/v1/user/profile", - "headers": { - "Authorization": "Bearer invalid-token", - "Accept": "application/json" - } - }, - "expected": { - "status": 401, - "headers": { - "Content-Type": "application/json" - }, - "body": { - "error": "AuthenticationError", - "message": "string" - } - } - }, - { - "id": "BR-004", - "name": "Create Job - Valid Request", - "description": "Validate job creation with valid parameters", - "request": { - "method": "POST", - "path": "/api/v1/jobs", - "headers": { - "Authorization": "Bearer valid-token", - "Content-Type": "application/json" - }, - "body": { - "service_type": "gpu_compute", - "spec": { - "gpu_type": "A100", - "count": 1, - "duration": 3600 - }, - "metadata": { - "name": "test-job" - } - } - }, - "expected": { - "status": 201, - "headers": { - "Content-Type": "application/json", - "Location": "string" - }, - "body": { - "id": "string", - "status": "pending", - "created_at": "string", - "estimated_completion": "string" - } - } - }, - { - "id": "BR-005", - "name": "Create Job - Invalid Parameters", - "description": "Validate proper error for invalid job parameters", - "request": { - "method": "POST", - "path": "/api/v1/jobs", - "headers": { - "Authorization": "Bearer valid-token", - "Content-Type": "application/json" - }, - "body": { - "service_type": "invalid_service", - "spec": {} - } - }, - "expected": { - "status": 400, - "headers": { - "Content-Type": "application/json" - }, - "body": { - "error": "ValidationError", - "message": "string", - "details": { - "field": "service_type", - "issue": "string" - } - } - } - }, - { - "id": "BR-006", - "name": "Get Job - Valid ID", - "description": "Validate 
job retrieval with valid ID", - "request": { - "method": "GET", - "path": "/api/v1/jobs/job-123", - "headers": { - "Authorization": "Bearer valid-token", - "Accept": "application/json" - } - }, - "expected": { - "status": 200, - "headers": { - "Content-Type": "application/json" - }, - "body": { - "id": "string", - "status": "string", - "created_at": "string", - "updated_at": "string", - "spec": "object", - "result": "object|null" - } - } - }, - { - "id": "BR-007", - "name": "Get Job - Not Found", - "description": "Validate proper error for non-existent job", - "request": { - "method": "GET", - "path": "/api/v1/jobs/nonexistent", - "headers": { - "Authorization": "Bearer valid-token", - "Accept": "application/json" - } - }, - "expected": { - "status": 404, - "headers": { - "Content-Type": "application/json" - }, - "body": { - "error": "NotFoundError", - "message": "string" - } - } - }, - { - "id": "BR-008", - "name": "List Jobs - With Pagination", - "description": "Validate job listing with pagination", - "request": { - "method": "GET", - "path": "/api/v1/jobs?limit=10&offset=0", - "headers": { - "Authorization": "Bearer valid-token", - "Accept": "application/json" - } - }, - "expected": { - "status": 200, - "headers": { - "Content-Type": "application/json" - }, - "body": { - "jobs": "array", - "total": "number", - "limit": "number", - "offset": "number", - "has_more": "boolean" - } - } - }, - { - "id": "BR-009", - "name": "Error Response Format", - "description": "Validate consistent error response format", - "request": { - "method": "POST", - "path": "/api/v1/invalid-endpoint", - "headers": { - "Authorization": "Bearer valid-token" - } - }, - "expected": { - "status": 404, - "headers": { - "Content-Type": "application/json" - }, - "body": { - "error": "string", - "message": "string", - "request_id": "string" - } - } - }, - { - "id": "BR-010", - "name": "Rate Limit Headers", - "description": "Validate rate limit headers are present", - "request": { - "method": 
"GET", - "path": "/api/v1/jobs", - "headers": { - "Authorization": "Bearer valid-token" - } - }, - "expected": { - "status": 200, - "headers": { - "X-RateLimit-Limit": "string", - "X-RateLimit-Remaining": "string", - "X-RateLimit-Reset": "string" - } - } - } - ] -} diff --git a/ecosystem-certification/test-suite/runners/python/test_runner.py b/ecosystem-certification/test-suite/runners/python/test_runner.py deleted file mode 100644 index 455e7227..00000000 --- a/ecosystem-certification/test-suite/runners/python/test_runner.py +++ /dev/null @@ -1,357 +0,0 @@ -""" -Python SDK conformance test runner for AITBC ecosystem certification -""" - -import asyncio -import json -import time -import sys -import traceback -from datetime import datetime -from pathlib import Path -from typing import Dict, List, Any, Optional -import aiohttp -import pytest -from pydantic import BaseModel, ValidationError - -# Import the SDK being tested -try: - from aitbc_enterprise import AITBCClient, ConnectorConfig -except ImportError: - print("ERROR: AITBC SDK not found. 
Please install it first.") - sys.exit(1) - - -class TestResult(BaseModel): - """Individual test result""" - test_id: str - name: str - passed: bool - duration: float - error: Optional[str] = None - details: Optional[Dict[str, Any]] = None - - -class SuiteResult(BaseModel): - """Test suite result""" - suite_name: str - level: str - total_tests: int - passed_tests: int - failed_tests: int - duration: float - results: List[TestResult] - compliance_score: float - - -class ConformanceTestRunner: - """Main test runner for SDK conformance""" - - def __init__(self, base_url: str, api_key: str): - self.base_url = base_url - self.api_key = api_key - self.client: Optional[AITBCClient] = None - self.results: List[TestResult] = [] - - async def run_suite(self, suite_path: str, level: str) -> SuiteResult: - """Run a test suite""" - print(f"\n{'='*60}") - print(f"Running {level.upper()} Certification Tests") - print(f"{'='*60}") - - # Load test suite - with open(suite_path, 'r') as f: - suite = json.load(f) - - start_time = time.time() - - # Initialize client - config = ConnectorConfig( - base_url=self.base_url, - api_key=self.api_key, - timeout=30.0 - ) - - async with AITBCClient(config) as client: - self.client = client - - # Run all tests - for test in suite['tests']: - result = await self._run_test(test) - self.results.append(result) - - # Print result - status = "✓ PASS" if result.passed else "✗ FAIL" - print(f"{status} {result.name} ({result.duration:.3f}s)") - - if not result.passed: - print(f" Error: {result.error}") - - duration = time.time() - start_time - - # Calculate results - passed = sum(1 for r in self.results if r.passed) - failed = len(self.results) - passed - compliance_score = (passed / len(self.results)) * 100 - - suite_result = SuiteResult( - suite_name=suite['name'], - level=level, - total_tests=len(self.results), - passed_tests=passed, - failed_tests=failed, - duration=duration, - results=self.results, - compliance_score=compliance_score - ) - - # Print 
summary - self._print_summary(suite_result) - - return suite_result - - async def _run_test(self, test: Dict[str, Any]) -> TestResult: - """Run a single test""" - start_time = time.time() - - try: - # Execute request based on test definition - response_data = await self._execute_request(test['request']) - - # Validate response - validation_result = await self._validate_response( - response_data, - test.get('expected', {}) - ) - - if validation_result['passed']: - return TestResult( - test_id=test['id'], - name=test['name'], - passed=True, - duration=time.time() - start_time, - details=validation_result.get('details') - ) - else: - return TestResult( - test_id=test['id'], - name=test['name'], - passed=False, - duration=time.time() - start_time, - error=validation_result['error'], - details=validation_result.get('details') - ) - - except Exception as e: - return TestResult( - test_id=test['id'], - name=test['name'], - passed=False, - duration=time.time() - start_time, - error=str(e), - details={"traceback": traceback.format_exc()} - ) - - async def _execute_request(self, request: Dict[str, Any]) -> Dict[str, Any]: - """Execute HTTP request using SDK""" - method = request['method'].upper() - path = request['path'] - headers = request.get('headers', {}) - body = request.get('body') - - # Parse path parameters - if '?' 
in path: - path, query = path.split('?', 1) - params = dict(q.split('=') for q in query.split('&')) - else: - params = {} - - # Make request using SDK client - if method == 'GET': - response = await self.client.get(path, params=params) - elif method == 'POST': - response = await self.client.post(path, json=body) - elif method == 'PUT': - response = await self.client.put(path, json=body) - elif method == 'DELETE': - response = await self.client.delete(path) - else: - raise ValueError(f"Unsupported method: {method}") - - return { - 'status': 200, # SDK handles status codes - 'headers': headers, - 'body': response - } - - async def _validate_response( - self, - response: Dict[str, Any], - expected: Dict[str, Any] - ) -> Dict[str, Any]: - """Validate response against expectations""" - errors = [] - details = {} - - # Validate status code - if 'status' in expected: - if response['status'] != expected['status']: - errors.append( - f"Status mismatch: expected {expected['status']}, " - f"got {response['status']}" - ) - - # Validate headers - if 'headers' in expected: - for header, value in expected['headers'].items(): - if header not in response['headers']: - errors.append(f"Missing header: {header}") - elif value != 'string' and response['headers'][header] != value: - errors.append( - f"Header {header} mismatch: expected {value}, " - f"got {response['headers'][header]}" - ) - - # Validate body - if 'body' in expected: - body_errors = await self._validate_body( - response['body'], - expected['body'] - ) - errors.extend(body_errors) - - return { - 'passed': len(errors) == 0, - 'error': '; '.join(errors) if errors else None, - 'details': details - } - - async def _validate_body(self, actual: Any, expected: Any) -> List[str]: - """Validate response body""" - errors = [] - - if expected == 'string': - if not isinstance(actual, str): - errors.append(f"Expected string, got {type(actual).__name__}") - elif expected == 'number': - if not isinstance(actual, (int, float)): - 
errors.append(f"Expected number, got {type(actual).__name__}") - elif expected == 'boolean': - if not isinstance(actual, bool): - errors.append(f"Expected boolean, got {type(actual).__name__}") - elif expected == 'array': - if not isinstance(actual, list): - errors.append(f"Expected array, got {type(actual).__name__}") - elif expected == 'object': - if not isinstance(actual, dict): - errors.append(f"Expected object, got {type(actual).__name__}") - elif expected == 'null': - if actual is not None: - errors.append(f"Expected null, got {actual}") - elif isinstance(expected, dict): - if not isinstance(actual, dict): - errors.append(f"Expected object, got {type(actual).__name__}") - else: - for key, value in expected.items(): - if key not in actual: - errors.append(f"Missing field: {key}") - else: - field_errors = await self._validate_body(actual[key], value) - for error in field_errors: - errors.append(f"{key}.{error}") - - return errors - - def _print_summary(self, result: SuiteResult): - """Print test suite summary""" - print(f"\n{'='*60}") - print(f"Test Suite Summary") - print(f"{'='*60}") - print(f"Suite: {result.suite_name}") - print(f"Level: {result.level.upper()}") - print(f"Total Tests: {result.total_tests}") - print(f"Passed: {result.passed_tests}") - print(f"Failed: {result.failed_tests}") - print(f"Duration: {result.duration:.2f}s") - print(f"Compliance Score: {result.compliance_score:.1f}%") - - if result.failed_tests > 0: - print(f"\nFailed Tests:") - for test in result.results: - if not test.passed: - print(f" ✗ {test.name} - {test.error}") - - print(f"\n{'='*60}") - - # Certification status - if result.compliance_score >= 95: - print(f"✓ CERTIFIED - {result.level.upper()}") - else: - print(f"✗ NOT CERTIFIED - Score below 95%") - - def save_report(self, result: SuiteResult, output_dir: Path): - """Save test report to file""" - report = { - "timestamp": datetime.utcnow().isoformat(), - "suite": result.dict(), - "sdk_version": "1.0.0", # Get from SDK - 
"test_environment": { - "base_url": self.base_url, - "runner_version": "1.0.0" - } - } - - output_file = output_dir / f"report_{result.level}_{int(time.time())}.json" - with open(output_file, 'w') as f: - json.dump(report, f, indent=2) - - print(f"\nReport saved to: {output_file}") - - -async def main(): - """Main entry point""" - import argparse - - parser = argparse.ArgumentParser(description="AITBC SDK Conformance Test Runner") - parser.add_argument("--base-url", default="http://localhost:8011", help="AITBC API base URL") - parser.add_argument("--api-key", required=True, help="API key for authentication") - parser.add_argument("--level", choices=["bronze", "silver", "gold", "all"], default="bronze") - parser.add_argument("--output-dir", default="./reports", help="Output directory for reports") - - args = parser.parse_args() - - # Create output directory - output_dir = Path(args.output_dir) - output_dir.mkdir(exist_ok=True) - - # Initialize test runner - runner = ConformanceTestRunner(args.base_url, args.api_key) - - # Run tests based on level - if args.level == "all": - levels = ["bronze", "silver", "gold"] - else: - levels = [args.level] - - all_passed = True - - for level in levels: - suite_path = Path(__file__).parent.parent.parent / "fixtures" / level / "api-compliance.json" - - if not suite_path.exists(): - print(f"ERROR: Test suite not found: {suite_path}") - all_passed = False - continue - - result = await runner.run_suite(str(suite_path), level) - runner.save_report(result, output_dir) - - if result.compliance_score < 95: - all_passed = False - - # Exit with appropriate code - sys.exit(0 if all_passed else 1) - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/ecosystem-certification/test-suite/security/security_validator.py b/ecosystem-certification/test-suite/security/security_validator.py deleted file mode 100644 index 1cb25e86..00000000 --- a/ecosystem-certification/test-suite/security/security_validator.py +++ /dev/null @@ -1,638 +0,0 
@@ -""" -Security validation framework for AITBC SDK certification -""" - -import json -import subprocess -import tempfile -import shutil -from pathlib import Path -from typing import Dict, List, Any, Optional -from dataclasses import dataclass, asdict -from datetime import datetime -import yaml - - -@dataclass -class SecurityIssue: - """Security issue representation""" - tool: str - severity: str # critical, high, medium, low - type: str # vulnerability, dependency, code_issue - description: str - file_path: Optional[str] = None - line_number: Optional[int] = None - cve_id: Optional[str] = None - remediation: Optional[str] = None - - -@dataclass -class SecurityReport: - """Security validation report""" - sdk_path: str - sdk_language: str - timestamp: datetime - issues: List[SecurityIssue] - score: float - certification_level: str - blocked: bool - - -class SecurityValidator: - """Main security validation orchestrator""" - - def __init__(self): - self.tools = { - "python": PythonSecurityValidator(), - "java": JavaSecurityValidator(), - "javascript": JavaScriptSecurityValidator(), - "typescript": TypeScriptSecurityValidator() - } - - def validate(self, sdk_path: str, certification_level: str = "bronze") -> SecurityReport: - """Validate SDK security""" - sdk_path = Path(sdk_path).resolve() - - # Detect language - language = self._detect_language(sdk_path) - if language not in self.tools: - raise ValueError(f"Unsupported language: {language}") - - # Run validation - validator = self.tools[language] - issues = validator.validate(sdk_path, certification_level) - - # Calculate score and determine certification status - score = self._calculate_score(issues, certification_level) - blocked = self._should_block_certification(issues, certification_level) - - return SecurityReport( - sdk_path=str(sdk_path), - sdk_language=language, - timestamp=datetime.utcnow(), - issues=issues, - score=score, - certification_level=certification_level, - blocked=blocked - ) - - def 
_detect_language(self, path: Path) -> str: - """Detect SDK programming language""" - # Check for language-specific files - if (path / "setup.py").exists() or (path / "pyproject.toml").exists(): - return "python" - elif (path / "pom.xml").exists() or (path / "build.gradle").exists(): - return "java" - elif (path / "package.json").exists(): - # Check if it's TypeScript - if any(path.rglob("*.ts")): - return "typescript" - return "javascript" - - raise ValueError("Could not detect SDK language") - - def _calculate_score(self, issues: List[SecurityIssue], level: str) -> float: - """Calculate security score (0-100)""" - weights = { - "critical": 25, - "high": 15, - "medium": 5, - "low": 1 - } - - total_deduction = 0 - for issue in issues: - total_deduction += weights.get(issue.severity, 0) - - score = max(0, 100 - total_deduction) - return score - - def _should_block_certification(self, issues: List[SecurityIssue], level: str) -> bool: - """Determine if issues should block certification""" - if level == "bronze": - # Block for critical or high severity issues - return any(i.severity in ["critical", "high"] for i in issues) - elif level == "silver": - # Block for critical issues - return any(i.severity == "critical" for i in issues) - elif level == "gold": - # Block for any issues - return len(issues) > 0 - - return False - - def export_sarif(self, report: SecurityReport, output_path: str): - """Export report in SARIF format""" - sarif = { - "version": "2.1.0", - "$schema": "https://json.schemastore.org/sarif-2.1.0.json", - "runs": [ - { - "tool": { - "driver": { - "name": "aitbc-security-validator", - "version": "1.0.0", - "informationUri": "https://aitbc.io/security" - } - }, - "results": [ - { - "ruleId": f"{issue.tool}-{issue.type}", - "level": self._map_severity_to_sarif(issue.severity), - "message": { - "text": issue.description - }, - "locations": [ - { - "physicalLocation": { - "artifactLocation": { - "uri": issue.file_path or "" - }, - "region": { - "startLine": 
issue.line_number or 1 - } - } - } - ], - "properties": { - "cve": issue.cve_id, - "remediation": issue.remediation - } - } - for issue in report.issues - ] - } - ] - } - - with open(output_path, 'w') as f: - json.dump(sarif, f, indent=2) - - def _map_severity_to_sarif(self, severity: str) -> str: - """Map severity to SARIF level""" - mapping = { - "critical": "error", - "high": "error", - "medium": "warning", - "low": "note" - } - return mapping.get(severity, "warning") - - -class PythonSecurityValidator: - """Python-specific security validation""" - - def validate(self, path: Path, level: str) -> List[SecurityIssue]: - """Run Python security checks""" - issues = [] - - # Dependency scanning with safety - issues.extend(self._scan_dependencies(path)) - - # Code analysis with bandit - if level in ["silver", "gold"]: - issues.extend(self._analyze_code(path)) - - # Check for secrets - if level == "gold": - issues.extend(self._scan_secrets(path)) - - return issues - - def _scan_dependencies(self, path: Path) -> List[SecurityIssue]: - """Scan Python dependencies for vulnerabilities""" - issues = [] - - # Find requirements files - req_files = list(path.rglob("requirements*.txt")) + list(path.rglob("pyproject.toml")) - - for req_file in req_files: - try: - # Run safety check - result = subprocess.run( - ["safety", "check", "--json", "--file", str(req_file)], - capture_output=True, - text=True, - cwd=path - ) - - if result.returncode == 0: - data = json.loads(result.stdout) - - for vuln in data: - issues.append(SecurityIssue( - tool="safety", - severity=self._map_safety_severity(vuln.get("advisory", "")), - type="dependency", - description=vuln.get("advisory", ""), - cve_id=vuln.get("cve"), - remediation=f"Update {vuln.get('package')} to {vuln.get('analyzed_version')}" - )) - except (subprocess.CalledProcessError, json.JSONDecodeError, FileNotFoundError): - # Safety not installed or failed - pass - - return issues - - def _analyze_code(self, path: Path) -> 
List[SecurityIssue]: - """Analyze Python code for security issues""" - issues = [] - - try: - # Run bandit - result = subprocess.run( - ["bandit", "-r", str(path), "-f", "json"], - capture_output=True, - text=True - ) - - if result.stdout: - data = json.loads(result.stdout) - - for issue in data.get("results", []): - issues.append(SecurityIssue( - tool="bandit", - severity=issue.get("issue_severity", "medium").lower(), - type="code_issue", - description=issue.get("issue_text", ""), - file_path=issue.get("filename"), - line_number=issue.get("line_number"), - remediation=issue.get("issue_cwe", {}).get("link") - )) - except (subprocess.CalledProcessError, json.JSONDecodeError, FileNotFoundError): - # Bandit not installed or failed - pass - - return issues - - def _scan_secrets(self, path: Path) -> List[SecurityIssue]: - """Scan for hardcoded secrets""" - issues = [] - - try: - # Run truffleHog - result = subprocess.run( - ["trufflehog", "--json", str(path)], - capture_output=True, - text=True - ) - - if result.stdout: - for line in result.stdout.strip().split('\n'): - if line: - finding = json.loads(line) - issues.append(SecurityIssue( - tool="trufflehog", - severity="high", - type="code_issue", - description="Hardcoded secret detected", - file_path=finding.get("path"), - line_number=finding.get("line"), - remediation="Remove hardcoded secret and use environment variables" - )) - except (subprocess.CalledProcessError, json.JSONDecodeError, FileNotFoundError): - # TruffleHog not installed or failed - pass - - return issues - - def _map_safety_severity(self, advisory: str) -> str: - """Map safety advisory to severity""" - advisory_lower = advisory.lower() - if any(word in advisory_lower for word in ["critical", "remote code execution"]): - return "critical" - elif any(word in advisory_lower for word in ["high", "execution", "bypass"]): - return "high" - elif any(word in advisory_lower for word in ["medium"]): - return "medium" - else: - return "low" - - -class 
JavaSecurityValidator: - """Java-specific security validation""" - - def validate(self, path: Path, level: str) -> List[SecurityIssue]: - """Run Java security checks""" - issues = [] - - # Dependency scanning with OWASP Dependency Check - issues.extend(self._scan_dependencies(path)) - - # Code analysis with SpotBugs - if level in ["silver", "gold"]: - issues.extend(self._analyze_code(path)) - - return issues - - def _scan_dependencies(self, path: Path) -> List[SecurityIssue]: - """Scan Java dependencies for vulnerabilities""" - issues = [] - - # Look for pom.xml or build.gradle - pom_file = path / "pom.xml" - gradle_file = path / "build.gradle" - - if pom_file.exists(): - # Run Maven dependency check - try: - result = subprocess.run( - ["mvn", "org.owasp:dependency-check-maven:check"], - capture_output=True, - text=True, - cwd=path - ) - - # Parse XML report - report_path = path / "target" / "dependency-check-report.xml" - if report_path.exists(): - issues.extend(self._parse_dependency_check_report(report_path)) - except subprocess.CalledProcessError: - pass - - elif gradle_file.exists(): - # Run Gradle dependency check - try: - result = subprocess.run( - ["./gradlew", "dependencyCheckAnalyze"], - capture_output=True, - text=True, - cwd=path - ) - - # Parse XML report - report_path = path / "build" / "reports" / "dependency-check-report.xml" - if report_path.exists(): - issues.extend(self._parse_dependency_check_report(report_path)) - except subprocess.CalledProcessError: - pass - - return issues - - def _parse_dependency_check_report(self, report_path: Path) -> List[SecurityIssue]: - """Parse OWASP Dependency Check XML report""" - import xml.etree.ElementTree as ET - - issues = [] - try: - tree = ET.parse(report_path) - root = tree.getroot() - - for vulnerability in root.findall(".//vulnerability"): - name = vulnerability.get("name") - severity = vulnerability.get("severity") - cve = vulnerability.get("cve") - - # Map severity - if severity.upper() in ["CRITICAL", 
"HIGH"]: - mapped_severity = "high" - elif severity.upper() == "MEDIUM": - mapped_severity = "medium" - else: - mapped_severity = "low" - - issues.append(SecurityIssue( - tool="dependency-check", - severity=mapped_severity, - type="dependency", - description=f"Vulnerability in {name}", - cve_id=cve, - remediation="Update dependency to patched version" - )) - except ET.ParseError: - pass - - return issues - - def _analyze_code(self, path: Path) -> List[SecurityIssue]: - """Analyze Java code with SpotBugs""" - issues = [] - - try: - # Run SpotBugs - result = subprocess.run( - ["spotbugs", "-textui", "-xml:withMessages", "-low", str(path)], - capture_output=True, - text=True - ) - - # Parse SpotBugs XML report - report_path = path / "spotbugsXml.xml" - if report_path.exists(): - issues.extend(self._parse_spotbugs_report(report_path)) - except subprocess.CalledProcessError: - pass - - return issues - - def _parse_spotbugs_report(self, report_path: Path) -> List[SecurityIssue]: - """Parse SpotBugs XML report""" - import xml.etree.ElementTree as ET - - issues = [] - try: - tree = ET.parse(report_path) - root = tree.getroot() - - for instance in root.findall(".//BugInstance"): - bug_type = instance.get("type") - priority = instance.get("priority") - - # Map priority to severity - if priority == "1": - severity = "high" - elif priority == "2": - severity = "medium" - else: - severity = "low" - - source_line = instance.find(".//SourceLine") - if source_line is not None: - issues.append(SecurityIssue( - tool="spotbugs", - severity=severity, - type="code_issue", - description=bug_type, - file_path=source_line.get("sourcepath"), - line_number=int(source_line.get("start", 0)), - remediation=f"Fix {bug_type} security issue" - )) - except ET.ParseError: - pass - - return issues - - -class JavaScriptSecurityValidator: - """JavaScript-specific security validation""" - - def validate(self, path: Path, level: str) -> List[SecurityIssue]: - """Run JavaScript security checks""" - 
issues = [] - - # Dependency scanning with npm audit - issues.extend(self._scan_dependencies(path)) - - # Code analysis with ESLint security rules - if level in ["silver", "gold"]: - issues.extend(self._analyze_code(path)) - - return issues - - def _scan_dependencies(self, path: Path) -> List[SecurityIssue]: - """Scan npm dependencies for vulnerabilities""" - issues = [] - - package_json = path / "package.json" - if not package_json.exists(): - return issues - - try: - # Run npm audit - result = subprocess.run( - ["npm", "audit", "--json"], - capture_output=True, - text=True, - cwd=path - ) - - if result.stdout: - data = json.loads(result.stdout) - - for advisory_id, advisory in data.get("vulnerabilities", {}).items(): - severity = advisory.get("severity", "low") - - issues.append(SecurityIssue( - tool="npm-audit", - severity=severity, - type="dependency", - description=advisory.get("title", ""), - cve_id=advisory.get("cwe"), - remediation=f"Run npm audit fix" - )) - except (subprocess.CalledProcessError, json.JSONDecodeError, FileNotFoundError): - pass - - return issues - - def _analyze_code(self, path: Path) -> List[SecurityIssue]: - """Analyze JavaScript code with ESLint""" - issues = [] - - try: - # Run ESLint with security plugin - result = subprocess.run( - ["npx", "eslint", "--format", "json", str(path)], - capture_output=True, - text=True - ) - - if result.stdout: - data = json.loads(result.stdout) - - for file_result in data: - for message in file_result.get("messages", []): - if "security" in message.get("ruleId", "").lower(): - issues.append(SecurityIssue( - tool="eslint", - severity="medium", - type="code_issue", - description=message.get("message"), - file_path=file_result.get("filePath"), - line_number=message.get("line"), - remediation=f"Fix {message.get('ruleId')} issue" - )) - except (subprocess.CalledProcessError, json.JSONDecodeError, FileNotFoundError): - pass - - return issues - - -class TypeScriptSecurityValidator(JavaScriptSecurityValidator): 
- """TypeScript-specific security validation (inherits from JavaScript)""" - - def validate(self, path: Path, level: str) -> List[SecurityIssue]: - """Run TypeScript security checks""" - # Run JavaScript checks first - issues = super().validate(path, level) - - # Additional TypeScript-specific checks - if level == "gold": - issues.extend(self._check_typescript_config(path)) - - return issues - - def _check_typescript_config(self, path: Path) -> List[SecurityIssue]: - """Check TypeScript configuration for security""" - issues = [] - - tsconfig = path / "tsconfig.json" - if tsconfig.exists(): - try: - with open(tsconfig) as f: - config = json.load(f) - - compiler_options = config.get("compilerOptions", {}) - - # Check for implicit any - if compiler_options.get("noImplicitAny") is not True: - issues.append(SecurityIssue( - tool="typescript-config", - severity="low", - type="code_issue", - description="TypeScript should disable implicit any", - file_path=str(tsconfig), - remediation="Set noImplicitAny to true" - )) - - # Check for strict mode - if compiler_options.get("strict") is not True: - issues.append(SecurityIssue( - tool="typescript-config", - severity="low", - type="code_issue", - description="TypeScript should use strict mode", - file_path=str(tsconfig), - remediation="Set strict to true" - )) - except json.JSONDecodeError: - pass - - return issues - - -def main(): - """CLI entry point""" - import argparse - - parser = argparse.ArgumentParser(description="AITBC SDK Security Validator") - parser.add_argument("sdk_path", help="Path to SDK directory") - parser.add_argument("--level", choices=["bronze", "silver", "gold"], default="bronze") - parser.add_argument("--output", help="Output SARIF report path") - parser.add_argument("--format", choices=["json", "sarif"], default="json") - - args = parser.parse_args() - - # Run validation - validator = SecurityValidator() - report = validator.validate(args.sdk_path, args.level) - - # Output results - if args.format == 
"sarif" and args.output: - validator.export_sarif(report, args.output) - else: - print(json.dumps(asdict(report), indent=2, default=str)) - - # Exit with error if blocked - if report.blocked: - print(f"\nCERTIFICATION BLOCKED: Security issues found") - for issue in report.issues: - if issue.severity in ["critical", "high"]: - print(f" - {issue.description} ({issue.severity})") - exit(1) - else: - print(f"\nSECURITY CHECK PASSED: Score {report.score}/100") - - -if __name__ == "__main__": - main() diff --git a/ecosystem-extensions/template/cookiecutter.json b/ecosystem-extensions/template/cookiecutter.json deleted file mode 100644 index 00614332..00000000 --- a/ecosystem-extensions/template/cookiecutter.json +++ /dev/null @@ -1,89 +0,0 @@ -{ - "extension_name": { - "type": "string", - "help": "Name of your extension (e.g., 'sap-connector')", - "default": "my-extension" - }, - "extension_display_name": { - "type": "string", - "help": "Display name for your extension", - "default": "My Extension" - }, - "extension_description": { - "type": "string", - "help": "Brief description of what your extension does", - "default": "An AITBC ecosystem extension" - }, - "extension_type": { - "type": "choice", - "choices": [ - "payment", - "erp", - "analytics", - "developer" - ], - "help": "Type of extension you're building", - "default": "payment" - }, - "author_name": { - "type": "string", - "help": "Your name or organization name", - "default": "Your Name" - }, - "author_email": { - "type": "string", - "help": "Contact email", - "default": "your.email@example.com" - }, - "github_username": { - "type": "string", - "help": "GitHub username for the repository", - "default": "yourusername" - }, - "package_name": { - "type": "string", - "help": "Python package name (will be auto-formatted)", - "default": "{{ cookiecutter.extension_name|replace('-', '_')|replace(' ', '_') }}" - }, - "class_name": { - "type": "string", - "help": "Main class name (will be auto-formatted)", - "default": 
"{{ cookiecutter.extension_name|title|replace('-', '')|replace(' ', '') }}Connector" - }, - "version": { - "type": "string", - "help": "Initial version", - "default": "0.1.0" - }, - "python_version": { - "type": "string", - "help": "Minimum Python version", - "default": "3.8" - }, - "use_asyncio": { - "type": "bool", - "help": "Use asyncio for async operations", - "default": true - }, - "include_tests": { - "type": "bool", - "help": "Include test suite template", - "default": true - }, - "include_docs": { - "type": "bool", - "help": "Include documentation template", - "default": true - }, - "license": { - "type": "choice", - "choices": [ - "MIT", - "Apache-2.0", - "BSD-3-Clause", - "GPL-3.0-or-later" - ], - "help": "License for your extension", - "default": "MIT" - } -} diff --git a/ecosystem-extensions/template/{{cookiecutter.package_name}}/extension.yaml b/ecosystem-extensions/template/{{cookiecutter.package_name}}/extension.yaml deleted file mode 100644 index 49e69061..00000000 --- a/ecosystem-extensions/template/{{cookiecutter.package_name}}/extension.yaml +++ /dev/null @@ -1,304 +0,0 @@ -# AITBC Extension Manifest -# This file defines the extension metadata and lifecycle configuration - -apiVersion: "v1" -kind: "Extension" - -# Basic information -metadata: - name: "{{ cookiecutter.extension_name }}" - displayName: "{{ cookiecutter.extension_display_name }}" - description: "{{ cookiecutter.extension_description }}" - version: "{{ cookiecutter.version }}" - author: "{{ cookiecutter.author_name }}" - email: "{{ cookiecutter.author_email }}" - license: "{{ cookiecutter.license }}" - homepage: "https://github.com/{{ cookiecutter.github_username }}/{{ cookiecutter.extension_name }}" - repository: "https://github.com/{{ cookiecutter.github_username }}/{{ cookiecutter.extension_name }}.git" - documentation: "https://{{ cookiecutter.extension_name }}.readthedocs.io" - -# Extension classification -spec: - type: "{{ cookiecutter.extension_type }}" - category: - {% if 
cookiecutter.extension_type == "payment" %} - - "payment-processor" - {% elif cookiecutter.extension_type == "erp" %} - - "erp-connector" - {% elif cookiecutter.extension_type == "analytics" %} - - "analytics-tool" - {% else %} - - "developer-tool" - {% endif %} - - # AITBC compatibility - aitbc: - minVersion: "1.0.0" - maxVersion: "2.0.0" - sdkVersion: "^1.0.0" - - # Runtime requirements - runtime: - python: ">= {{ cookiecutter.python_version }}" - {% if cookiecutter.use_asyncio %} - features: ["async"] - {% endif %} - - # Dependencies - dependencies: - core: - - "aitbc-enterprise>=1.0.0" - {% if cookiecutter.extension_type == "payment" %} - payments: - - "stripe>=5.0.0" - {% elif cookiecutter.extension_type == "erp" %} - erp: - - "requests>=2.25.0" - - "pandas>=1.3.0" - {% elif cookiecutter.extension_type == "analytics" %} - analytics: - - "matplotlib>=3.5.0" - - "plotly>=5.0.0" - {% else %} - devtools: - - "click>=8.0.0" - {% endif %} - - # Extension configuration schema - configSchema: - type: "object" - properties: - {% if cookiecutter.extension_type == "payment" %} - api_key: - type: "string" - description: "API key for the payment service" - sensitive: true - webhook_secret: - type: "string" - description: "Webhook secret for verification" - sensitive: true - sandbox: - type: "boolean" - description: "Use sandbox environment" - default: false - {% elif cookiecutter.extension_type == "erp" %} - host: - type: "string" - description: "ERP system host" - format: "hostname" - port: - type: "integer" - description: "ERP system port" - default: 443 - username: - type: "string" - description: "ERP username" - sensitive: true - password: - type: "string" - description: "ERP password" - sensitive: true - database: - type: "string" - description: "ERP database name" - {% elif cookiecutter.extension_type == "analytics" %} - data_source: - type: "string" - description: "Data source URL" - refresh_interval: - type: "integer" - description: "Data refresh interval in 
seconds" - default: 300 - retention_days: - type: "integer" - description: "Data retention period in days" - default: 90 - {% else %} - debug_mode: - type: "boolean" - description: "Enable debug logging" - default: false - log_level: - type: "string" - enum: ["DEBUG", "INFO", "WARNING", "ERROR"] - default: "INFO" - {% endif %} - required: - {% if cookiecutter.extension_type == "payment" %} - - "api_key" - {% elif cookiecutter.extension_type == "erp" %} - - "host" - - "username" - - "password" - - "database" - {% elif cookiecutter.extension_type == "analytics" %} - - "data_source" - {% endif %} - - # Health check configuration - health: - enabled: true - endpoint: "/health" - interval: 30 - timeout: 5 - checks: - - name: "service_connection" - type: "external" - command: "python -c 'import {{ cookiecutter.package_name }}; print(\"OK\")'" - {% if cookiecutter.extension_type == "payment" %} - - name: "payment_api" - type: "http" - url: "https://api.stripe.com/v1" - expectedStatus: 200 - {% endif %} - - # Metrics configuration - metrics: - enabled: true - endpoint: "/metrics" - format: "prometheus" - customMetrics: - {% if cookiecutter.extension_type == "payment" %} - - name: "payment_operations_total" - type: "counter" - help: "Total number of payment operations" - - name: "payment_amount_sum" - type: "histogram" - help: "Payment amount distribution" - {% elif cookiecutter.extension_type == "erp" %} - - name: "sync_operations_total" - type: "counter" - help: "Total number of sync operations" - - name: "sync_records_processed" - type: "counter" - help: "Total records processed during sync" - {% elif cookiecutter.extension_type == "analytics" %} - - name: "analytics_queries_total" - type: "counter" - help: "Total number of analytics queries" - - name: "data_processing_time" - type: "histogram" - help: "Time spent processing analytics data" - {% endif %} - - # Webhook configuration (if applicable) - {% if cookiecutter.extension_type == "payment" %} - webhooks: - enabled: 
true - events: - - "payment.created" - - "payment.succeeded" - - "payment.failed" - - "refund.created" - endpoint: "/webhooks" - secret: "{{ cookiecutter.extension_name }}_webhook" - retryPolicy: - maxRetries: 3 - backoff: "exponential" - {% endif %} - - # Security configuration - security: - {% if cookiecutter.extension_type == "payment" %} - pciCompliance: true - dataEncryption: true - {% elif cookiecutter.extension_type == "erp" %} - tlsRequired: true - auditLogging: true - {% endif %} - permissions: - - "read:transactions" - - "write:transactions" - {% if cookiecutter.extension_type == "erp" %} - - "read:customers" - - "write:customers" - {% endif %} - -# Deployment configuration -deployment: - type: "docker" - - # Docker configuration - docker: - image: "{{ cookiecutter.github_username }}/{{ cookiecutter.extension_name }}:{{ cookiecutter.version }}" - ports: - - "8080:8080" - environment: - - "AITBC_ENV=production" - - "LOG_LEVEL=INFO" - volumes: - - "/data/{{ cookiecutter.extension_name }}:/app/data" - resources: - limits: - cpu: "500m" - memory: "512Mi" - requests: - cpu: "100m" - memory: "128Mi" - - # Kubernetes configuration (optional) - kubernetes: - enabled: false - replicas: 2 - service: - type: "ClusterIP" - port: 80 - ingress: - enabled: false - host: "{{ cookiecutter.extension_name }}.aitbc.local" - - # Scaling configuration - scaling: - minReplicas: 1 - maxReplicas: 10 - targetCPUUtilization: 70 - targetMemoryUtilization: 80 - -# Testing configuration -testing: - frameworks: - - "pytest" - - "pytest-asyncio" # if asyncio enabled - coverage: - enabled: true - threshold: 80 - environments: - - name: "unit" - command: "pytest tests/unit/" - - name: "integration" - command: "pytest tests/integration/" - - name: "e2e" - command: "pytest tests/e2e/" - -# Documentation -documentation: - type: "sphinx" - theme: "sphinx_rtd_theme" - build: - command: "sphinx-build -b html docs docs/_build" - deploy: - type: "github-pages" - branch: "gh-pages" - -# Release 
configuration -release: - type: "semantic" - branches: - main: "main" - develop: "develop" - release: "release/*" - changelog: - enabled: true - file: "CHANGELOG.md" - artifacts: - - "dist/*.whl" - - "dist/*.tar.gz" - -# Support information -support: - website: "https://{{ cookiecutter.extension_name }}.aitbc.io" - documentation: "https://{{ cookiecutter.extension_name }}.readthedocs.io" - issues: "https://github.com/{{ cookiecutter.github_username }}/{{ cookiecutter.extension_name }}/issues" - discussions: "https://github.com/{{ cookiecutter.github_username }}/{{ cookiecutter.extension_name }}/discussions" - email: "{{ cookiecutter.author_email }}" - slack: "#{{ cookiecutter.extension_name }}-support" diff --git a/ecosystem-extensions/template/{{cookiecutter.package_name}}/setup.py b/ecosystem-extensions/template/{{cookiecutter.package_name}}/setup.py deleted file mode 100644 index ff2fa5e4..00000000 --- a/ecosystem-extensions/template/{{cookiecutter.package_name}}/setup.py +++ /dev/null @@ -1,97 +0,0 @@ -""" -Setup script for {{ cookiecutter.extension_display_name }} -""" - -from setuptools import setup, find_packages -import os - -# Read the contents of README file -this_directory = os.path.abspath(os.path.dirname(__file__)) -with open(os.path.join(this_directory, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - -# Read requirements -with open(os.path.join(this_directory, 'requirements.txt'), encoding='utf-8') as f: - requirements = f.read().splitlines() - -setup( - name="{{ cookiecutter.package_name }}", - version="{{ cookiecutter.version }}", - author="{{ cookiecutter.author_name }}", - author_email="{{ cookiecutter.author_email }}", - description="{{ cookiecutter.extension_description }}", - long_description=long_description, - long_description_content_type="text/markdown", - url="https://github.com/{{ cookiecutter.github_username }}/{{ cookiecutter.extension_name }}", - packages=find_packages(), - classifiers=[ - "Development Status :: 4 
- Beta", - "Intended Audience :: Developers", - "License :: OSI Approved :: {{ cookiecutter.license }} License", - "Operating System :: OS Independent", - "Programming Language :: Python :: {{ cookiecutter.python_version }}", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Topic :: Software Development :: Libraries :: Python Modules", - "Topic :: Office/Business :: Financial", - {% if cookiecutter.extension_type == "payment" %} - "Topic :: Office/Business :: Financial :: Point-Of-Sale", - {% elif cookiecutter.extension_type == "erp" %} - "Topic :: Office/Business", - {% elif cookiecutter.extension_type == "analytics" %} - "Topic :: Scientific/Engineering :: Information Analysis", - {% else %} - "Topic :: Software Development :: Libraries", - {% endif %} - ], - python_requires=">={{ cookiecutter.python_version }}", - install_requires=requirements, - extras_require={ - "dev": [ - "pytest>=6.0", - "pytest-asyncio>=0.18.0" if {{ cookiecutter.use_asyncio|lower }} else "", - "pytest-cov>=2.12", - "black>=21.0", - "isort>=5.9", - "flake8>=3.9", - "mypy>=0.910", - "pre-commit>=2.15", - ], - "docs": [ - "sphinx>=4.0", - "sphinx-rtd-theme>=1.0", - "myst-parser>=0.15", - ], - {% if cookiecutter.extension_type == "analytics" %} - "viz": [ - "matplotlib>=3.5.0", - "plotly>=5.0.0", - "seaborn>=0.11.0", - ], - {% endif %} - }, - entry_points={ - "console_scripts": [ - "{{ cookiecutter.package_name }}={{ cookiecutter.package_name }}.cli:main", - ], - "aitbc.extensions": [ - "{{ cookiecutter.extension_name }}={{ cookiecutter.package_name }}.{{ cookiecutter.class_name }}", - ], - }, - include_package_data=True, - package_data={ - "{{ cookiecutter.package_name }}": [ - "templates/*.yaml", - "templates/*.json", - "static/*", - ], - }, - zip_safe=False, - keywords="aitbc {{ cookiecutter.extension_type }} {{ cookiecutter.extension_name }}", - project_urls={ - "Bug Reports": "https://github.com/{{ 
cookiecutter.github_username }}/{{ cookiecutter.extension_name }}/issues", - "Source": "https://github.com/{{ cookiecutter.github_username }}/{{ cookiecutter.extension_name }}", - "Documentation": "https://{{ cookiecutter.extension_name }}.readthedocs.io", - }, -) diff --git a/ecosystem-extensions/template/{{cookiecutter.package_name}}/{{cookiecutter.package_name}}/__init__.py b/ecosystem-extensions/template/{{cookiecutter.package_name}}/{{cookiecutter.package_name}}/__init__.py deleted file mode 100644 index 1c20186c..00000000 --- a/ecosystem-extensions/template/{{cookiecutter.package_name}}/{{cookiecutter.package_name}}/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -""" -{{ cookiecutter.extension_display_name }} - AITBC Extension - -{{ cookiecutter.extension_description }} -""" - -__version__ = "{{ cookiecutter.version }}" -__author__ = "{{ cookiecutter.author_name }} <{{ cookiecutter.author_email }}>" -__license__ = "{{ cookiecutter.license }}" - -from .{{ cookiecutter.extension_name }} import {{ cookiecutter.class_name }} - -__all__ = ["{{ cookiecutter.class_name }}"] diff --git a/ecosystem-extensions/template/{{cookiecutter.package_name}}/{{cookiecutter.package_name}}/{{ cookiecutter.extension_name }}.py b/ecosystem-extensions/template/{{cookiecutter.package_name}}/{{cookiecutter.package_name}}/{{ cookiecutter.extension_name }}.py deleted file mode 100644 index d009e530..00000000 --- a/ecosystem-extensions/template/{{cookiecutter.package_name}}/{{cookiecutter.package_name}}/{{ cookiecutter.extension_name }}.py +++ /dev/null @@ -1,369 +0,0 @@ -""" -{{ cookiecutter.extension_display_name }} Connector - -{{ cookiecutter.extension_description }} -""" - -{% if cookiecutter.use_asyncio %} -import asyncio -from typing import Dict, Any, Optional, List -{% else %} -from typing import Dict, Any, Optional, List -{% endif %} - -from aitbc_enterprise.base import BaseConnector -from aitbc_enterprise.core import AITBCClient, ConnectorConfig -from aitbc_enterprise.exceptions 
import ConnectorError - -{% if cookiecutter.extension_type == "payment" %} -from aitbc_enterprise.payments.base import PaymentConnector, Charge, Refund, PaymentMethod -{% elif cookiecutter.extension_type == "erp" %} -from aitbc_enterprise.erp.base import ERPConnector, ERPDataModel, SyncResult -{% endif %} - - -class {{ cookiecutter.class_name }}({% if cookiecutter.extension_type == "payment" %}PaymentConnector{% elif cookiecutter.extension_type == "erp" %}ERPConnector{% else %}BaseConnector{% endif %}): - """ - {{ cookiecutter.extension_display_name }} connector for AITBC - - This connector provides integration with {{ cookiecutter.extension_name }}. - """ - - def __init__(self, client: AITBCClient, config: ConnectorConfig): - """ - Initialize the {{ cookiecutter.extension_name }} connector - - Args: - client: AITBC client instance - config: Connector configuration - """ - super().__init__(client, config) - - # Initialize your service client here - # Example: - # self.service_client = ServiceClient( - # api_key=config.settings.get("api_key"), - # base_url=config.settings.get("base_url") - # ) - - self.logger = __import__('logging').getLogger(f"aitbc.{self.__class__.__name__}") - - {% if cookiecutter.use_asyncio %} - async def initialize(self): - """ - Initialize the connector and establish connections - """ - await super().initialize() - - # Initialize your service connection here - # Example: - # await self.service_client.authenticate() - - self.logger.info("{{ cookiecutter.class_name }} initialized successfully") - - async def cleanup(self): - """ - Cleanup resources and close connections - """ - # Cleanup your service connection here - # Example: - # await self.service_client.close() - - await super().cleanup() - - self.logger.info("{{ cookiecutter.class_name }} cleaned up successfully") - {% endif %} - - {% if cookiecutter.extension_type == "payment" %} - {% if cookiecutter.use_asyncio %} - async def create_charge( - self, - amount: int, - currency: str, - 
source: str, - description: Optional[str] = None, - metadata: Optional[Dict[str, Any]] = None - ) -> Charge: - """ - Create a payment charge - - Args: - amount: Amount in smallest currency unit - currency: Currency code (e.g., 'USD') - source: Payment source identifier - description: Optional description - metadata: Optional metadata - - Returns: - Charge object representing the payment - """ - try: - # Implement charge creation logic here - # Example: - # charge_data = await self.service_client.create_charge({ - # "amount": amount, - # "currency": currency, - # "source": source, - # "description": description, - # "metadata": metadata or {} - # }) - - # Convert to AITBC Charge format - charge = Charge( - id="charge_123", # From service response - amount=amount, - currency=currency, - status="pending", # From service response - created_at=__import__('datetime').datetime.utcnow(), - metadata=metadata or {} - ) - - # Log the operation - await self._log_operation("create_charge", { - "amount": amount, - "currency": currency, - "charge_id": charge.id - }) - - return charge - - except Exception as e: - self.logger.error(f"Failed to create charge: {e}") - raise ConnectorError(f"Charge creation failed: {e}") - - async def refund_charge( - self, - charge_id: str, - amount: Optional[int] = None, - reason: Optional[str] = None - ) -> Refund: - """ - Refund a charge - - Args: - charge_id: ID of charge to refund - amount: Optional amount to refund (full if None) - reason: Optional refund reason - - Returns: - Refund object - """ - # Implement refund logic here - pass - - async def get_charge(self, charge_id: str) -> Charge: - """ - Get charge details - - Args: - charge_id: Charge ID - - Returns: - Charge object - """ - # Implement charge retrieval here - pass - {% else %} - def create_charge( - self, - amount: int, - currency: str, - source: str, - description: Optional[str] = None, - metadata: Optional[Dict[str, Any]] = None - ) -> Charge: - """ - Create a payment charge 
(synchronous version) - """ - # Synchronous implementation - pass - {% endif %} - - {% elif cookiecutter.extension_type == "erp" %} - {% if cookiecutter.use_asyncio %} - async def sync_data( - self, - data_type: str, - start_date: Optional[__import__('datetime').datetime] = None, - end_date: Optional[__import__('datetime').datetime] = None - ) -> SyncResult: - """ - Sync data from ERP system - - Args: - data_type: Type of data to sync (e.g., 'customers', 'orders') - start_date: Optional start date for sync - end_date: Optional end date for sync - - Returns: - SyncResult with sync statistics - """ - try: - # Implement sync logic here - # Example: - # data = await self.service_client.get_data( - # data_type=data_type, - # start_date=start_date, - # end_date=end_date - # ) - - # Process and transform data - # processed_data = self._transform_data(data) - - # Store in AITBC - # await self._store_data(processed_data) - - result = SyncResult( - records_processed=100, # From actual sync - records_created=80, - records_updated=20, - errors=[], - sync_time=__import__('datetime').datetime.utcnow() - ) - - # Log the operation - await self._log_operation("sync_data", { - "data_type": data_type, - "records_processed": result.records_processed - }) - - return result - - except Exception as e: - self.logger.error(f"Failed to sync {data_type}: {e}") - raise ConnectorError(f"Data sync failed: {e}") - - async def get_data_model(self, data_type: str) -> ERPDataModel: - """ - Get data model for ERP data type - - Args: - data_type: Type of data - - Returns: - ERPDataModel definition - """ - # Implement data model retrieval here - pass - {% else %} - def sync_data( - self, - data_type: str, - start_date: Optional[__import__('datetime').datetime] = None, - end_date: Optional[__import__('datetime').datetime] = None - ) -> SyncResult: - """ - Sync data from ERP system (synchronous version) - """ - # Synchronous implementation - pass - {% endif %} - - {% else %} - {% if 
cookiecutter.use_asyncio %} - async def execute_operation( - self, - operation: str, - parameters: Optional[Dict[str, Any]] = None - ) -> Dict[str, Any]: - """ - Execute a custom operation - - Args: - operation: Operation name - parameters: Optional parameters - - Returns: - Operation result - """ - try: - # Implement your custom operation here - result = { - "operation": operation, - "parameters": parameters, - "result": "success", - "timestamp": __import__('datetime').datetime.utcnow().isoformat() - } - - # Log the operation - await self._log_operation("execute_operation", { - "operation": operation, - "parameters": parameters - }) - - return result - - except Exception as e: - self.logger.error(f"Failed to execute {operation}: {e}") - raise ConnectorError(f"Operation failed: {e}") - {% else %} - def execute_operation( - self, - operation: str, - parameters: Optional[Dict[str, Any]] = None - ) -> Dict[str, Any]: - """ - Execute a custom operation (synchronous version) - """ - # Synchronous implementation - pass - {% endif %} - {% endif %} - - # Helper methods - - def _transform_data(self, data: List[Dict[str, Any]]) -> List[Dict[str, Any]]: - """ - Transform data from external format to AITBC format - - Args: - data: Raw data from external service - - Returns: - Transformed data - """ - # Implement data transformation logic here - return data - - {% if cookiecutter.use_asyncio %} - async def _store_data(self, data: List[Dict[str, Any]]) -> bool: - """ - Store data in AITBC - - Args: - data: Data to store - - Returns: - True if successful - """ - # Implement data storage logic here - return True - {% else %} - def _store_data(self, data: List[Dict[str, Any]]) -> bool: - """ - Store data in AITBC (synchronous version) - """ - # Synchronous implementation - return True - {% endif %} - - def validate_config(self) -> bool: - """ - Validate connector configuration - - Returns: - True if configuration is valid - """ - required_settings = [] - - {% if 
cookiecutter.extension_type == "payment" %} - required_settings = ["api_key", "webhook_secret"] - {% elif cookiecutter.extension_type == "erp" %} - required_settings = ["host", "username", "password", "database"] - {% endif %} - - for setting in required_settings: - if setting not in self.config.settings: - raise ConnectorError(f"Missing required setting: {setting}") - - return True diff --git a/ecosystem/academic/engagement-framework.md b/ecosystem/academic/engagement-framework.md deleted file mode 100644 index 3f542227..00000000 --- a/ecosystem/academic/engagement-framework.md +++ /dev/null @@ -1,335 +0,0 @@ -# AITBC Academic Engagement Framework - -## Overview - -This framework outlines AITBC's strategy for engaging with academia and open-source foundations to steward long-term protocol evolution, foster research collaboration, and ensure the protocol's technical excellence and widespread adoption. - -## Mission - -To establish AITBC as the premier platform for decentralized AI research through strategic partnerships with academic institutions and open-source communities, driving innovation and ensuring protocol longevity. - -## Engagement Pillars - -### 1. Research Partnerships - -#### University Research Program -- **Objective**: Collaborate on cutting-edge blockchain and AI research -- **Target Institutions**: Top 50 computer science and AI research universities -- **Program Elements**: - - Joint research grants ($50K-$200K per project) - - PhD fellowships (5 per year) - - Post-doctoral positions (3 per year) - - Access to AITBC testnet and data - - Co-authored publications - -#### Research Focus Areas -1. **Consensus Mechanisms** - - Hybrid PoA/PoS optimization - - Energy-efficient validation - - Game theory applications - -2. **Scalability Solutions** - - Sharding algorithms - - Rollup optimizations - - Cross-chain protocols - -3. **Privacy & Security** - - Zero-knowledge proofs for AI - - Secure multi-party computation - - Cryptographic protocols - -4. 
**AI/ML on Blockchain** - - Federated learning - - Verifiable computation - - Incentive mechanisms - -### 2. Open Source Foundation Engagement - -#### Foundation Partnerships -- **Linux Foundation**: Hyperledger collaboration -- **Apache Foundation**: AI/ML project integration -- **EFF**: Privacy and digital rights -- **Internet Archive**: Decentralized storage -- **Mozilla**: Open web and standards - -#### Contribution Strategy -- **Code Contributions**: Core protocol improvements -- **Documentation**: Technical guides and tutorials -- **Standards**: Participation in working groups -- **Tooling**: Development and testing tools - -### 3. Educational Programs - -#### Academic Curriculum -- **Course Materials**: Blockchain and AI curriculum -- **Lab Exercises**: Hands-on AITBC development -- **Textbooks**: Chapter contributions -- **Online Courses**: MOOC partnerships - -#### Student Programs -- **Internships**: Summer and year-round -- **Hackathons**: University-hosted events -- **Competitions**: AI/ML challenges -- **Scholarships**: Merit-based awards - -### 4. 
Community Building - -#### Academic Network -- **Conferences**: Sponsorship and participation -- **Workshops**: Technical deep dives -- **Seminars**: Guest lecture series -- **Symposia**: Annual research symposium - -#### Open Source Community -- **Contributor Program**: Mentorship and support -- **Bug Bounties**: Academic-focused rewards -- **Documentation**: Community-driven -- **Localization**: Multi-language support - -## Implementation Roadmap - -### Phase 1: Foundation (Months 1-3) -- [ ] Establish academic relations team -- [ ] Create partnership templates -- [ ] Launch initial outreach (10 universities) -- [ ] Join 2 open source foundations -- [ ] Create educational materials - -### Phase 2: Expansion (Months 4-6) -- [ ] Sign 5 university partnerships -- [ ] Launch first research grants -- [ ] Establish PhD fellowship program -- [ ] Host inaugural academic workshop -- [ ] Release open source tools - -### Phase 3: Growth (Months 7-12) -- [ ] Expand to 20 university partners -- [ ] Fund 15 research projects -- [ ] Establish research chair positions -- [ ] Launch annual symposium -- [ ] Achieve 100+ academic contributors - -### Phase 4: Maturity (Months 13-24) -- [ ] Global reach (50+ universities) -- [ ] Self-sustaining research fund -- [ ] Protocol governance participation -- [ ] Industry-academia bridge program -- [ ] Measurable impact metrics - -## Partnership Models - -### Research Collaboration Agreement -```yaml -Duration: 3 years -Funding: $100K/year -Deliverables: - - 2 joint publications/year - - 1 prototype implementation - - Student participation - - Data sharing agreement -IP Rights: - - Joint ownership - - Open source publication - - Patent pool participation -``` - -### Educational Partnership -```yaml -Duration: 5 years -Support: - - Curriculum development - - Lab equipment grants - - Student scholarships - - Faculty training -Benefits: - - Early talent access - - Research pipeline - - Brand visibility - - Community building -``` - -### Open 
Source Contribution -```yaml -Type: In-kind contribution -Scope: - - Core protocol improvements - - Documentation - - Tool development - - Community support -Recognition: - - Project attribution - - Conference speaking - - Advisory board seat - - Technical leadership -``` - -## Success Metrics - -### Research Impact -- **Publications**: 20+ peer-reviewed papers/year -- **Citations**: 1000+ citations/year -- **Patents**: 5+ filed/year -- **Grants**: $2M+ in external funding - -### Community Engagement -- **Universities**: 50+ active partners -- **Students**: 500+ participants -- **Contributors**: 100+ active developers -- **Projects**: 30+ open source contributions - -### Protocol Evolution -- **Standards**: 10+ RFC/EIP contributions -- **Implementations**: 5+ major features -- **Adoption**: 20+ academic networks -- **Innovation**: 3+ breakthrough technologies - -## Governance Structure - -### Academic Advisory Board -- **Composition**: 15 members (10 academic, 5 industry) -- **Term**: 2 years, renewable once -- **Meetings**: Quarterly -- **Responsibilities**: - - Research direction guidance - - Partnership approval - - Fund allocation - - Quality assurance - -### Technical Steering Committee -- **Composition**: 20 members (technical leads) -- **Selection**: Merit-based, community vote -- **Term**: 1 year -- **Scope**: - - Technical roadmap - - Standards compliance - - Code quality - - Security review - -### Community Council -- **Composition**: Open to all contributors -- **Leadership**: Elected representatives -- **Meetings**: Monthly -- **Focus**: - - Community health - - Contributor support - - Documentation - - Outreach - -## Resource Allocation - -### Annual Budget: $5M - -| Category | Amount | Purpose | -|----------|--------|---------| -| Research Grants | $2M | 20 projects @ $100K | -| Fellowships | $1M | 20 PhDs @ $50K | -| Educational Programs | $500K | Materials, workshops | -| Community Support | $500K | Contributors, events | -| Infrastructure | $500K | 
Testnet, tools | -| Operations | $500K | Team, administration | - -### In-Kind Contributions -- **Compute Resources**: GPU clusters for research -- **Data Access**: Anonymized network data -- **Expertise**: Technical mentorship -- **Platform**: Testnet and tools - -## Engagement Activities - -### Annual Academic Summit -- **Duration**: 3 days -- **Participants**: 300+ (researchers, students, industry) -- **Content**: Papers, workshops, keynotes -- **Outcomes**: Proceedings, collaborations - -### Quarterly Workshops -- **Format**: Virtual/In-person hybrid -- **Topics**: Technical deep dives -- **Participants**: 50-100 -- **Goal**: Knowledge sharing - -### Monthly Seminars -- **Format**: Online presentations -- **Speakers**: Academic and industry experts -- **Audience**: Open to all -- **Archive**: YouTube, documentation - -### Continuous Programs -- **Office Hours**: Expert consultation -- **Mentorship**: 1-on-1 guidance -- **Review**: Code and paper reviews -- **Collaboration**: Project matching - -## Communication Channels - -### Primary Channels -- **Website**: academic.aitbc.io -- **Newsletter**: Monthly updates -- **Discord**: Academic community -- **LinkedIn**: Professional network - -### Academic Networks -- **ResearchGate**: Publication sharing -- **arXiv**: Preprint server -- **Google Scholar**: Citation tracking -- **ORCID**: Researcher identification - -### Open Source Platforms -- **GitHub**: Code and documentation -- **GitLab**: Alternative hosting -- **ReadTheDocs**: Documentation -- **Stack Overflow**: Q&A support - -## Evaluation Framework - -### Quarterly Reviews -- **Metrics Collection**: KPI tracking -- **Stakeholder Feedback**: Surveys, interviews -- **Progress Assessment**: Milestone completion -- **Adjustment**: Strategy refinement - -### Annual Assessment -- **Impact Analysis**: Research outcomes -- **ROI Calculation**: Resource efficiency -- **Strategic Review**: Long-term alignment -- **Planning**: Next year goals - -## Risk Management - 
-### Academic Risks -- **Funding Dependency**: Diversify sources -- **Personnel Turnover**: Succession planning -- **IP Conflicts**: Clear agreements -- **Publication Delays**: Open access preprints - -### Open Source Risks -- **License Compliance**: Legal review -- **Security Vulnerabilities**: Bug bounties -- **Community Toxicity**: Code of conduct -- **Project Forking**: Governance clarity - -### Mitigation Strategies -- **Legal Framework**: Clear agreements -- **Insurance**: Professional liability -- **Monitoring**: Continuous oversight -- **Contingency**: Backup plans - -## Contact Information - -### Academic Relations -- **Email**: academic@aitbc.io -- **Phone**: +1-555-ACADEMIC -- **Office**: Boston, MA / Zurich, CH - -### Open Source -- **Email**: opensource@aitbc.io -- **GitHub**: @aitbc-foundation -- **Discord**: #opensource channel - -### General Inquiries -- **Email**: info@aitbc.io -- **Website**: https://aitbc.io/academic - ---- - -*This framework is a living document that will evolve based on community feedback and partnership experiences. Regular reviews ensure alignment with our mission and values.* diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/__init__.py b/enterprise-connectors/python-sdk/aitbc_enterprise/__init__.py deleted file mode 100644 index 322e7149..00000000 --- a/enterprise-connectors/python-sdk/aitbc_enterprise/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -""" -AITBC Enterprise Connectors SDK - -Python SDK for integrating AITBC with enterprise systems including -payment processors, ERP systems, and other business applications. 
-""" - -__version__ = "1.0.0" -__author__ = "AITBC Team" - -from .core import AITBCClient, ConnectorConfig -from .base import BaseConnector -from .exceptions import ( - AITBCError, - AuthenticationError, - RateLimitError, - APIError, - ConfigurationError -) - -__all__ = [ - "AITBCClient", - "ConnectorConfig", - "BaseConnector", - "AITBCError", - "AuthenticationError", - "RateLimitError", - "APIError", - "ConfigurationError", -] diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/auth.py b/enterprise-connectors/python-sdk/aitbc_enterprise/auth.py deleted file mode 100644 index 82cf79b6..00000000 --- a/enterprise-connectors/python-sdk/aitbc_enterprise/auth.py +++ /dev/null @@ -1,207 +0,0 @@ -""" -Authentication handlers for AITBC Enterprise Connectors -""" - -import base64 -import hashlib -import hmac -import json -import time -from abc import ABC, abstractmethod -from typing import Dict, Any, Optional -from datetime import datetime, timedelta - -from .core import ConnectorConfig -from .exceptions import AuthenticationError - - -class AuthHandler(ABC): - """Abstract base class for authentication handlers""" - - @abstractmethod - async def get_headers(self) -> Dict[str, str]: - """Get authentication headers""" - pass - - -class BearerAuthHandler(AuthHandler): - """Bearer token authentication""" - - def __init__(self, config: ConnectorConfig): - self.api_key = config.api_key - - async def get_headers(self) -> Dict[str, str]: - """Get Bearer token headers""" - return { - "Authorization": f"Bearer {self.api_key}" - } - - -class BasicAuthHandler(AuthHandler): - """Basic authentication""" - - def __init__(self, config: ConnectorConfig): - self.username = config.auth_config.get("username") - self.password = config.auth_config.get("password") - - async def get_headers(self) -> Dict[str, str]: - """Get Basic auth headers""" - if not self.username or not self.password: - raise AuthenticationError("Username and password required for Basic auth") - - credentials = 
f"{self.username}:{self.password}" - encoded = base64.b64encode(credentials.encode()).decode() - - return { - "Authorization": f"Basic {encoded}" - } - - -class APIKeyAuthHandler(AuthHandler): - """API key authentication (custom header)""" - - def __init__(self, config: ConnectorConfig): - self.api_key = config.api_key - self.header_name = config.auth_config.get("header_name", "X-API-Key") - - async def get_headers(self) -> Dict[str, str]: - """Get API key headers""" - return { - self.header_name: self.api_key - } - - -class HMACAuthHandler(AuthHandler): - """HMAC signature authentication""" - - def __init__(self, config: ConnectorConfig): - self.api_key = config.api_key - self.secret = config.auth_config.get("secret") - self.algorithm = config.auth_config.get("algorithm", "sha256") - - async def get_headers(self) -> Dict[str, str]: - """Get HMAC signature headers""" - if not self.secret: - raise AuthenticationError("Secret required for HMAC auth") - - timestamp = str(int(time.time())) - message = f"{timestamp}:{self.api_key}" - - signature = hmac.new( - self.secret.encode(), - message.encode(), - getattr(hashlib, self.algorithm) - ).hexdigest() - - return { - "X-API-Key": self.api_key, - "X-Timestamp": timestamp, - "X-Signature": signature - } - - -class OAuth2Handler(AuthHandler): - """OAuth 2.0 authentication""" - - def __init__(self, config: ConnectorConfig): - self.client_id = config.auth_config.get("client_id") - self.client_secret = config.auth_config.get("client_secret") - self.token_url = config.auth_config.get("token_url") - self.scope = config.auth_config.get("scope", "") - - self._access_token = None - self._refresh_token = None - self._expires_at = None - - async def get_headers(self) -> Dict[str, str]: - """Get OAuth 2.0 headers""" - if not self._is_token_valid(): - await self._refresh_access_token() - - return { - "Authorization": f"Bearer {self._access_token}" - } - - def _is_token_valid(self) -> bool: - """Check if access token is valid""" - if not 
self._access_token or not self._expires_at: - return False - - # Refresh 5 minutes before expiry - return datetime.utcnow() < (self._expires_at - timedelta(minutes=5)) - - async def _refresh_access_token(self): - """Refresh OAuth 2.0 access token""" - import aiohttp - - data = { - "grant_type": "client_credentials", - "client_id": self.client_id, - "client_secret": self.client_secret, - "scope": self.scope - } - - async with aiohttp.ClientSession() as session: - async with session.post(self.token_url, data=data) as response: - if response.status != 200: - raise AuthenticationError(f"OAuth token request failed: {response.status}") - - token_data = await response.json() - - self._access_token = token_data["access_token"] - self._refresh_token = token_data.get("refresh_token") - - expires_in = token_data.get("expires_in", 3600) - self._expires_at = datetime.utcnow() + timedelta(seconds=expires_in) - - -class CertificateAuthHandler(AuthHandler): - """Certificate-based authentication""" - - def __init__(self, config: ConnectorConfig): - self.cert_path = config.auth_config.get("cert_path") - self.key_path = config.auth_config.get("key_path") - self.passphrase = config.auth_config.get("passphrase") - - async def get_headers(self) -> Dict[str, str]: - """Certificate auth uses client cert, not headers""" - return {} - - def get_ssl_context(self): - """Get SSL context for certificate authentication""" - import ssl - - context = ssl.create_default_context() - - if self.cert_path and self.key_path: - context.load_cert_chain( - self.cert_path, - self.key_path, - password=self.passphrase - ) - - return context - - -class AuthHandlerFactory: - """Factory for creating authentication handlers""" - - @staticmethod - def create(config: ConnectorConfig) -> AuthHandler: - """Create appropriate auth handler based on config""" - auth_type = config.auth_type.lower() - - if auth_type == "bearer": - return BearerAuthHandler(config) - elif auth_type == "basic": - return 
BasicAuthHandler(config) - elif auth_type == "api_key": - return APIKeyAuthHandler(config) - elif auth_type == "hmac": - return HMACAuthHandler(config) - elif auth_type == "oauth2": - return OAuth2Handler(config) - elif auth_type == "certificate": - return CertificateAuthHandler(config) - else: - raise AuthenticationError(f"Unsupported auth type: {auth_type}") diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/base.py b/enterprise-connectors/python-sdk/aitbc_enterprise/base.py deleted file mode 100644 index b03fc00c..00000000 --- a/enterprise-connectors/python-sdk/aitbc_enterprise/base.py +++ /dev/null @@ -1,369 +0,0 @@ -""" -Base connector class for AITBC Enterprise Connectors -""" - -import asyncio -import logging -from abc import ABC, abstractmethod -from typing import Dict, Any, Optional, List, Union, Callable, Awaitable -from datetime import datetime -from dataclasses import dataclass -import json - -from .core import AITBCClient, ConnectorConfig -from .exceptions import AITBCError, ConnectorError, ValidationError -from .webhooks import WebhookHandler -from .validators import BaseValidator - - -@dataclass -class OperationResult: - """Result of a connector operation""" - success: bool - data: Optional[Dict[str, Any]] = None - error: Optional[str] = None - metadata: Optional[Dict[str, Any]] = None - timestamp: datetime = None - - def __post_init__(self): - if self.timestamp is None: - self.timestamp = datetime.utcnow() - - -@dataclass -class Transaction: - """Standard transaction representation""" - id: str - amount: float - currency: str - status: str - created_at: datetime - updated_at: datetime - metadata: Dict[str, Any] = None - - def to_dict(self) -> Dict[str, Any]: - return { - "id": self.id, - "amount": self.amount, - "currency": self.currency, - "status": self.status, - "created_at": self.created_at.isoformat(), - "updated_at": self.updated_at.isoformat(), - "metadata": self.metadata or {} - } - - -class BaseConnector(ABC): - """Base class 
for all enterprise connectors""" - - def __init__( - self, - client: AITBCClient, - config: ConnectorConfig, - validator: Optional[BaseValidator] = None, - webhook_handler: Optional[WebhookHandler] = None - ): - self.client = client - self.config = config - self.logger = logging.getLogger(f"aitbc.{self.__class__.__name__}") - - # Injected dependencies - self.validator = validator - self.webhook_handler = webhook_handler - - # Connector state - self._initialized = False - self._last_sync = None - - # Event handlers - self._operation_handlers: Dict[str, List[Callable]] = {} - - # Metrics - self._operation_count = 0 - self._error_count = 0 - - async def initialize(self) -> None: - """Initialize the connector""" - if self._initialized: - return - - try: - # Perform connector-specific initialization - await self._initialize() - - # Set up webhooks if configured - if self.config.webhook_endpoint and self.webhook_handler: - await self._setup_webhooks() - - # Register event handlers - self._register_handlers() - - self._initialized = True - self.logger.info(f"{self.__class__.__name__} initialized") - - except Exception as e: - self.logger.error(f"Failed to initialize {self.__class__.__name__}: {e}") - raise ConnectorError(f"Initialization failed: {e}") - - async def cleanup(self) -> None: - """Cleanup connector resources""" - try: - # Perform connector-specific cleanup - await self._cleanup() - - # Cleanup webhooks - if self.webhook_handler: - await self.webhook_handler.cleanup() - - self._initialized = False - self.logger.info(f"{self.__class__.__name__} cleaned up") - - except Exception as e: - self.logger.error(f"Error during cleanup: {e}") - - async def execute_operation( - self, - operation: str, - data: Dict[str, Any], - **kwargs - ) -> OperationResult: - """Execute an operation with validation and error handling""" - if not self._initialized: - await self.initialize() - - start_time = datetime.utcnow() - - try: - # Validate input if validator is configured - if 
self.validator: - await self.validator.validate(operation, data) - - # Pre-operation hook - await self._before_operation(operation, data) - - # Execute the operation - result = await self._execute_operation(operation, data, **kwargs) - - # Post-operation hook - await self._after_operation(operation, data, result) - - # Update metrics - self._operation_count += 1 - - # Emit operation event - await self._emit_operation_event(operation, result) - - return result - - except Exception as e: - self._error_count += 1 - self.logger.error(f"Operation {operation} failed: {e}") - - error_result = OperationResult( - success=False, - error=str(e), - timestamp=datetime.utcnow() - ) - - # Emit error event - await self._emit_operation_event(f"{operation}.error", error_result) - - return error_result - - finally: - # Log operation duration - duration = (datetime.utcnow() - start_time).total_seconds() - self.logger.debug(f"Operation {operation} completed in {duration:.3f}s") - - async def batch_execute( - self, - operations: List[Dict[str, Any]], - max_concurrent: int = 10 - ) -> List[OperationResult]: - """Execute multiple operations concurrently""" - semaphore = asyncio.Semaphore(max_concurrent) - - async def _execute_with_semaphore(op_data): - async with semaphore: - return await self.execute_operation(**op_data) - - tasks = [_execute_with_semaphore(op) for op in operations] - return await asyncio.gather(*tasks, return_exceptions=True) - - async def sync( - self, - since: Optional[datetime] = None, - filters: Optional[Dict[str, Any]] = None - ) -> Dict[str, Any]: - """Synchronize data with external system""" - if not self._initialized: - await self.initialize() - - try: - # Perform sync - result = await self._sync(since, filters) - - # Update last sync timestamp - self._last_sync = datetime.utcnow() - - return result - - except Exception as e: - self.logger.error(f"Sync failed: {e}") - raise ConnectorError(f"Sync failed: {e}") - - async def validate_webhook(self, payload: 
Dict[str, Any], signature: str) -> bool: - """Validate incoming webhook payload""" - if not self.webhook_handler: - return False - - return await self.webhook_handler.validate(payload, signature) - - async def handle_webhook(self, payload: Dict[str, Any]) -> Dict[str, Any]: - """Handle incoming webhook""" - if not self.webhook_handler: - raise ConnectorError("Webhook handler not configured") - - return await self.webhook_handler.handle(payload) - - def add_operation_handler( - self, - operation: str, - handler: Callable[[Dict[str, Any]], Awaitable[None]] - ): - """Add handler for specific operation""" - if operation not in self._operation_handlers: - self._operation_handlers[operation] = [] - self._operation_handlers[operation].append(handler) - - def remove_operation_handler( - self, - operation: str, - handler: Callable - ): - """Remove handler for specific operation""" - if operation in self._operation_handlers: - try: - self._operation_handlers[operation].remove(handler) - except ValueError: - pass - - # Abstract methods to be implemented by subclasses - - @abstractmethod - async def _initialize(self) -> None: - """Connector-specific initialization""" - pass - - @abstractmethod - async def _cleanup(self) -> None: - """Connector-specific cleanup""" - pass - - @abstractmethod - async def _execute_operation( - self, - operation: str, - data: Dict[str, Any], - **kwargs - ) -> OperationResult: - """Execute connector-specific operation""" - pass - - async def _sync( - self, - since: Optional[datetime], - filters: Optional[Dict[str, Any]] - ) -> Dict[str, Any]: - """Default sync implementation""" - return { - "synced_at": datetime.utcnow().isoformat(), - "records": 0, - "message": "Sync not implemented" - } - - # Hook methods - - async def _before_operation( - self, - operation: str, - data: Dict[str, Any] - ) -> None: - """Called before operation execution""" - pass - - async def _after_operation( - self, - operation: str, - data: Dict[str, Any], - result: 
OperationResult - ) -> None: - """Called after operation execution""" - pass - - # Private methods - - async def _setup_webhooks(self) -> None: - """Setup webhook endpoints""" - if not self.webhook_handler: - return - - await self.webhook_handler.setup( - endpoint=self.config.webhook_endpoint, - secret=self.config.webhook_secret - ) - - def _register_handlers(self) -> None: - """Register default event handlers""" - # Register with client if needed - pass - - async def _emit_operation_event( - self, - event: str, - result: OperationResult - ) -> None: - """Emit operation event to handlers""" - if event in self._operation_handlers: - tasks = [] - for handler in self._operation_handlers[event]: - try: - tasks.append(handler(result.to_dict() if result.data else {})) - except Exception as e: - self.logger.error(f"Handler error: {e}") - - if tasks: - await asyncio.gather(*tasks, return_exceptions=True) - - # Properties - - @property - def is_initialized(self) -> bool: - """Check if connector is initialized""" - return self._initialized - - @property - def last_sync(self) -> Optional[datetime]: - """Get last sync timestamp""" - return self._last_sync - - @property - def metrics(self) -> Dict[str, Any]: - """Get connector metrics""" - return { - "operation_count": self._operation_count, - "error_count": self._error_count, - "error_rate": self._error_count / max(self._operation_count, 1), - "last_sync": self._last_sync.isoformat() if self._last_sync else None - } - - # Context manager - - async def __aenter__(self): - """Async context manager entry""" - await self.initialize() - return self - - async def __aexit__(self, exc_type, exc_val, exc_tb): - """Async context manager exit""" - await self.cleanup() diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/core.py b/enterprise-connectors/python-sdk/aitbc_enterprise/core.py deleted file mode 100644 index 5d36e925..00000000 --- a/enterprise-connectors/python-sdk/aitbc_enterprise/core.py +++ /dev/null @@ -1,296 +0,0 
@@ -""" -Core components for AITBC Enterprise Connectors SDK -""" - -import asyncio -import logging -from typing import Optional, Dict, Any, Callable, Awaitable -from dataclasses import dataclass, field -from datetime import datetime, timedelta -import aiohttp -from aiohttp import ClientTimeout, ClientSession - -from .auth import AuthHandler -from .rate_limiter import RateLimiter -from .metrics import MetricsCollector -from .exceptions import ConfigurationError - - -@dataclass -class ConnectorConfig: - """Configuration for AITBC connectors""" - - # API Configuration - base_url: str - api_key: str - api_version: str = "v1" - - # Connection Settings - timeout: float = 30.0 - max_connections: int = 100 - max_retries: int = 3 - retry_backoff: float = 1.0 - - # Rate Limiting - rate_limit: Optional[int] = None # Requests per second - burst_limit: Optional[int] = None - - # Authentication - auth_type: str = "bearer" # bearer, basic, custom - auth_config: Dict[str, Any] = field(default_factory=dict) - - # Webhooks - webhook_secret: Optional[str] = None - webhook_endpoint: Optional[str] = None - - # Monitoring - enable_metrics: bool = True - metrics_endpoint: Optional[str] = None - - # Logging - log_level: str = "INFO" - log_format: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s" - - # Enterprise Features - enterprise_id: Optional[str] = None - tenant_id: Optional[str] = None - region: Optional[str] = None - - def __post_init__(self): - """Validate configuration""" - if not self.base_url: - raise ConfigurationError("base_url is required") - if not self.api_key: - raise ConfigurationError("api_key is required") - - # Set up logging - logging.basicConfig( - level=getattr(logging, self.log_level.upper()), - format=self.log_format - ) - - -class AITBCClient: - """Main client for AITBC Enterprise Connectors""" - - def __init__( - self, - config: ConnectorConfig, - session: Optional[ClientSession] = None, - auth_handler: Optional[AuthHandler] = None, - rate_limiter: 
Optional[RateLimiter] = None, - metrics: Optional[MetricsCollector] = None - ): - self.config = config - self.logger = logging.getLogger(f"aitbc.{self.__class__.__name__}") - - # Initialize components with dependency injection - self._session = session or self._create_session() - self._auth = auth_handler or AuthHandler(config) - self._rate_limiter = rate_limiter or RateLimiter(config) - self._metrics = metrics or MetricsCollector(config) if config.enable_metrics else None - - # Event handlers - self._event_handlers: Dict[str, list] = {} - - # Connection state - self._connected = False - self._last_activity = None - - def _create_session(self) -> ClientSession: - """Create HTTP session with configuration""" - timeout = ClientTimeout(total=self.config.timeout) - - # Set up headers - headers = { - "User-Agent": f"AITBC-SDK/{__version__}", - "Accept": "application/json", - "Content-Type": "application/json" - } - - return ClientSession( - timeout=timeout, - headers=headers, - connector=aiohttp.TCPConnector( - limit=self.config.max_connections, - limit_per_host=self.config.max_connections // 4 - ) - ) - - async def connect(self) -> None: - """Establish connection to AITBC""" - if self._connected: - return - - try: - # Test connection - await self._test_connection() - - # Start metrics collection - if self._metrics: - await self._metrics.start() - - self._connected = True - self._last_activity = datetime.utcnow() - - self.logger.info("Connected to AITBC") - await self._emit_event("connected", {"timestamp": self._last_activity}) - - except Exception as e: - self.logger.error(f"Failed to connect: {e}") - raise - - async def disconnect(self) -> None: - """Close connection to AITBC""" - if not self._connected: - return - - try: - # Stop metrics collection - if self._metrics: - await self._metrics.stop() - - # Close session - await self._session.close() - - self._connected = False - self.logger.info("Disconnected from AITBC") - await self._emit_event("disconnected", 
{"timestamp": datetime.utcnow()}) - - except Exception as e: - self.logger.error(f"Error during disconnect: {e}") - - async def request( - self, - method: str, - path: str, - **kwargs - ) -> Dict[str, Any]: - """Make authenticated request to AITBC API""" - if not self._connected: - await self.connect() - - # Apply rate limiting - if self.config.rate_limit: - await self._rate_limiter.acquire() - - # Prepare request - url = f"{self.config.base_url}/{self.config.api_version}/{path.lstrip('/')}" - - # Add authentication - headers = kwargs.pop("headers", {}) - auth_headers = await self._auth.get_headers() - headers.update(auth_headers) - - # Retry logic - last_exception = None - for attempt in range(self.config.max_retries + 1): - try: - start_time = datetime.utcnow() - - async with self._session.request( - method, - url, - headers=headers, - **kwargs - ) as response: - # Record metrics - if self._metrics: - duration = (datetime.utcnow() - start_time).total_seconds() - await self._metrics.record_request( - method=method, - path=path, - status=response.status, - duration=duration - ) - - # Handle response - if response.status == 429: - retry_after = int(response.headers.get("Retry-After", self.config.retry_backoff)) - await asyncio.sleep(retry_after) - continue - - response.raise_for_status() - - data = await response.json() - self._last_activity = datetime.utcnow() - - return data - - except aiohttp.ClientError as e: - last_exception = e - if attempt < self.config.max_retries: - backoff = self.config.retry_backoff * (2 ** attempt) - self.logger.warning(f"Request failed, retrying in {backoff}s: {e}") - await asyncio.sleep(backoff) - else: - self.logger.error(f"Request failed after {self.config.max_retries} retries: {e}") - raise - - raise last_exception - - async def get(self, path: str, **kwargs) -> Dict[str, Any]: - """Make GET request""" - return await self.request("GET", path, **kwargs) - - async def post(self, path: str, **kwargs) -> Dict[str, Any]: - """Make POST 
request""" - return await self.request("POST", path, **kwargs) - - async def put(self, path: str, **kwargs) -> Dict[str, Any]: - """Make PUT request""" - return await self.request("PUT", path, **kwargs) - - async def delete(self, path: str, **kwargs) -> Dict[str, Any]: - """Make DELETE request""" - return await self.request("DELETE", path, **kwargs) - - def on(self, event: str, handler: Callable[[Dict[str, Any]], Awaitable[None]]): - """Register event handler""" - if event not in self._event_handlers: - self._event_handlers[event] = [] - self._event_handlers[event].append(handler) - - def off(self, event: str, handler: Callable): - """Unregister event handler""" - if event in self._event_handlers: - try: - self._event_handlers[event].remove(handler) - except ValueError: - pass - - async def _emit_event(self, event: str, data: Dict[str, Any]): - """Emit event to registered handlers""" - if event in self._event_handlers: - tasks = [] - for handler in self._event_handlers[event]: - tasks.append(handler(data)) - - if tasks: - await asyncio.gather(*tasks, return_exceptions=True) - - async def _test_connection(self): - """Test connection to AITBC""" - try: - await self.get("/health") - except Exception as e: - raise ConnectionError(f"Failed to connect to AITBC: {e}") - - @property - def is_connected(self) -> bool: - """Check if client is connected""" - return self._connected - - @property - def last_activity(self) -> Optional[datetime]: - """Get last activity timestamp""" - return self._last_activity - - async def __aenter__(self): - """Async context manager entry""" - await self.connect() - return self - - async def __aexit__(self, exc_type, exc_val, exc_tb): - """Async context manager exit""" - await self.disconnect() diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/erp/__init__.py b/enterprise-connectors/python-sdk/aitbc_enterprise/erp/__init__.py deleted file mode 100644 index 2979ac6b..00000000 --- 
a/enterprise-connectors/python-sdk/aitbc_enterprise/erp/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -""" -ERP system connectors for AITBC Enterprise -""" - -from .base import ERPConnector, ERPDataModel, ProtocolHandler, DataMapper -from .sap import SAPConnector -from .oracle import OracleConnector -from .netsuite import NetSuiteConnector - -__all__ = [ - "ERPConnector", - "ERPDataModel", - "ProtocolHandler", - "DataMapper", - "SAPConnector", - "OracleConnector", - "NetSuiteConnector", -] diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/erp/base.py b/enterprise-connectors/python-sdk/aitbc_enterprise/erp/base.py deleted file mode 100644 index a2acbdc5..00000000 --- a/enterprise-connectors/python-sdk/aitbc_enterprise/erp/base.py +++ /dev/null @@ -1,501 +0,0 @@ -""" -Base classes for ERP connectors with plugin architecture -""" - -import asyncio -import json -from abc import ABC, abstractmethod -from typing import Dict, Any, List, Optional, Type, Union, Callable -from datetime import datetime, timedelta -from dataclasses import dataclass, field -from enum import Enum -import importlib - -from ..base import BaseConnector, OperationResult -from ..core import ConnectorConfig -from ..exceptions import ERPError, ValidationError - - -class ERPSystem(Enum): - """Supported ERP systems""" - SAP = "sap" - ORACLE = "oracle" - NETSUITE = "netsuite" - MICROSOFT_DYNAMICS = "dynamics" - SALESFORCE = "salesforce" - - -class Protocol(Enum): - """Supported protocols""" - REST = "rest" - SOAP = "soap" - ODATA = "odata" - IDOC = "idoc" - BAPI = "bapi" - SUITE_TALK = "suite_talk" - - -@dataclass -class ERPDataModel: - """ERP data model definition""" - entity_type: str - fields: Dict[str, Any] - relationships: Dict[str, str] = field(default_factory=dict) - validations: Dict[str, Any] = field(default_factory=dict) - - def to_dict(self) -> Dict[str, Any]: - return { - "entity_type": self.entity_type, - "fields": self.fields, - "relationships": self.relationships, - "validations": 
self.validations - } - - -@dataclass -class SyncResult: - """Synchronization result""" - entity_type: str - synced_count: int - failed_count: int - errors: List[str] = field(default_factory=list) - last_sync: datetime = field(default_factory=datetime.utcnow) - - def to_dict(self) -> Dict[str, Any]: - return { - "entity_type": self.entity_type, - "synced_count": self.synced_count, - "failed_count": self.failed_count, - "errors": self.errors, - "last_sync": self.last_sync.isoformat() - } - - -class ProtocolHandler(ABC): - """Abstract base class for protocol handlers""" - - def __init__(self, config: ConnectorConfig): - self.config = config - self.logger = __import__('logging').getLogger(f"aitbc.{self.__class__.__name__}") - - @abstractmethod - async def connect(self) -> bool: - """Establish protocol connection""" - pass - - @abstractmethod - async def disconnect(self): - """Close protocol connection""" - pass - - @abstractmethod - async def send_request(self, endpoint: str, data: Dict[str, Any]) -> Dict[str, Any]: - """Send request via protocol""" - pass - - @abstractmethod - async def batch_request(self, requests: List[Dict[str, Any]]) -> List[Dict[str, Any]]: - """Send batch requests""" - pass - - -class DataMapper: - """Maps data between AITBC and ERP formats""" - - def __init__(self, mappings: Dict[str, Dict[str, str]]): - self.mappings = mappings - self.logger = __import__('logging').getLogger(f"aitbc.{self.__class__.__name__}") - - def to_erp(self, entity_type: str, data: Dict[str, Any]) -> Dict[str, Any]: - """Map AITBC format to ERP format""" - if entity_type not in self.mappings: - raise ValidationError(f"No mapping for entity type: {entity_type}") - - mapping = self.mappings[entity_type] - erp_data = {} - - for aitbc_field, erp_field in mapping.items(): - if aitbc_field in data: - erp_data[erp_field] = data[aitbc_field] - - return erp_data - - def from_erp(self, entity_type: str, data: Dict[str, Any]) -> Dict[str, Any]: - """Map ERP format to AITBC 
format""" - if entity_type not in self.mappings: - raise ValidationError(f"No mapping for entity type: {entity_type}") - - mapping = self.mappings[entity_type] - aitbc_data = {} - - # Reverse mapping - reverse_mapping = {v: k for k, v in mapping.items()} - - for erp_field, value in data.items(): - if erp_field in reverse_mapping: - aitbc_data[reverse_mapping[erp_field]] = value - - return aitbc_data - - -class BatchProcessor: - """Handles batch operations for ERP connectors""" - - def __init__(self, batch_size: int = 100, max_concurrent: int = 5): - self.batch_size = batch_size - self.max_concurrent = max_concurrent - self.logger = __import__('logging').getLogger(f"aitbc.{self.__class__.__name__}") - - async def process_batches( - self, - items: List[Dict[str, Any]], - processor: Callable[[List[Dict[str, Any]]], List[Dict[str, Any]]] - ) -> List[Dict[str, Any]]: - """Process items in batches""" - results = [] - semaphore = asyncio.Semaphore(self.max_concurrent) - - async def process_batch(batch): - async with semaphore: - try: - return await processor(batch) - except Exception as e: - self.logger.error(f"Batch processing failed: {e}") - return [{"error": str(e)} for _ in batch] - - # Create batches - batches = [ - items[i:i + self.batch_size] - for i in range(0, len(items), self.batch_size) - ] - - # Process batches concurrently - tasks = [process_batch(batch) for batch in batches] - batch_results = await asyncio.gather(*tasks, return_exceptions=True) - - # Flatten results - for result in batch_results: - if isinstance(result, list): - results.extend(result) - else: - results.append({"error": str(result)}) - - return results - - -class ChangeTracker: - """Tracks changes for delta synchronization""" - - def __init__(self): - self.last_syncs: Dict[str, datetime] = {} - self.change_logs: Dict[str, List[Dict[str, Any]]] = {} - - def update_last_sync(self, entity_type: str, timestamp: datetime): - """Update last sync timestamp""" - self.last_syncs[entity_type] = 
timestamp - - def get_last_sync(self, entity_type: str) -> Optional[datetime]: - """Get last sync timestamp""" - return self.last_syncs.get(entity_type) - - def log_change(self, entity_type: str, change: Dict[str, Any]): - """Log a change""" - if entity_type not in self.change_logs: - self.change_logs[entity_type] = [] - - self.change_logs[entity_type].append({ - **change, - "timestamp": datetime.utcnow() - }) - - def get_changes_since( - self, - entity_type: str, - since: datetime - ) -> List[Dict[str, Any]]: - """Get changes since timestamp""" - changes = self.change_logs.get(entity_type, []) - return [ - c for c in changes - if c["timestamp"] > since - ] - - -class ERPConnector(BaseConnector): - """Base class for ERP connectors with plugin architecture""" - - # Registry for protocol handlers - _protocol_registry: Dict[Protocol, Type[ProtocolHandler]] = {} - - def __init__( - self, - client: 'AITBCClient', - config: ConnectorConfig, - erp_system: ERPSystem, - protocol: Protocol, - data_mapper: Optional[DataMapper] = None - ): - super().__init__(client, config) - - self.erp_system = erp_system - self.protocol = protocol - - # Initialize components - self.protocol_handler = self._create_protocol_handler() - self.data_mapper = data_mapper or DataMapper({}) - self.batch_processor = BatchProcessor() - self.change_tracker = ChangeTracker() - - # ERP-specific configuration - self.erp_config = config.auth_config.get("erp", {}) - - # Data models - self.data_models: Dict[str, ERPDataModel] = {} - - @classmethod - def register_protocol( - cls, - protocol: Protocol, - handler_class: Type[ProtocolHandler] - ): - """Register a protocol handler""" - cls._protocol_registry[protocol] = handler_class - - def _create_protocol_handler(self) -> ProtocolHandler: - """Create protocol handler from registry""" - if self.protocol not in self._protocol_registry: - raise ERPError(f"No handler registered for protocol: {self.protocol}") - - handler_class = 
self._protocol_registry[self.protocol] - return handler_class(self.config) - - async def _initialize(self) -> None: - """Initialize ERP connector""" - # Connect via protocol - if not await self.protocol_handler.connect(): - raise ERPError(f"Failed to connect via {self.protocol}") - - # Load data models - await self._load_data_models() - - self.logger.info(f"{self.erp_system.value} connector initialized") - - async def _cleanup(self) -> None: - """Cleanup ERP connector""" - await self.protocol_handler.disconnect() - - async def _execute_operation( - self, - operation: str, - data: Dict[str, Any], - **kwargs - ) -> OperationResult: - """Execute ERP-specific operations""" - try: - if operation.startswith("create_"): - entity_type = operation[7:] # Remove "create_" prefix - return await self._create_entity(entity_type, data) - elif operation.startswith("update_"): - entity_type = operation[7:] # Remove "update_" prefix - return await self._update_entity(entity_type, data) - elif operation.startswith("delete_"): - entity_type = operation[7:] # Remove "delete_" prefix - return await self._delete_entity(entity_type, data) - elif operation == "sync": - return await self._sync_data(data) - elif operation == "batch_sync": - return await self._batch_sync(data) - else: - raise ValidationError(f"Unknown operation: {operation}") - - except Exception as e: - self.logger.error(f"ERP operation failed: {e}") - raise ERPError(f"Operation failed: {e}") - - async def _create_entity(self, entity_type: str, data: Dict[str, Any]) -> OperationResult: - """Create entity in ERP""" - # Map data to ERP format - erp_data = self.data_mapper.to_erp(entity_type, data) - - # Send to ERP - endpoint = f"/{entity_type}" - result = await self.protocol_handler.send_request(endpoint, erp_data) - - # Track change - self.change_tracker.log_change(entity_type, { - "action": "create", - "data": result - }) - - return OperationResult( - success=True, - data=result, - metadata={"entity_type": entity_type, 
"action": "create"} - ) - - async def _update_entity(self, entity_type: str, data: Dict[str, Any]) -> OperationResult: - """Update entity in ERP""" - entity_id = data.get("id") - if not entity_id: - raise ValidationError("Entity ID required for update") - - # Map data to ERP format - erp_data = self.data_mapper.to_erp(entity_type, data) - - # Send to ERP - endpoint = f"/{entity_type}/{entity_id}" - result = await self.protocol_handler.send_request(endpoint, erp_data, method="PUT") - - # Track change - self.change_tracker.log_change(entity_type, { - "action": "update", - "entity_id": entity_id, - "data": result - }) - - return OperationResult( - success=True, - data=result, - metadata={"entity_type": entity_type, "action": "update"} - ) - - async def _delete_entity(self, entity_type: str, data: Dict[str, Any]) -> OperationResult: - """Delete entity from ERP""" - entity_id = data.get("id") - if not entity_id: - raise ValidationError("Entity ID required for delete") - - # Send to ERP - endpoint = f"/{entity_type}/{entity_id}" - await self.protocol_handler.send_request(endpoint, {}, method="DELETE") - - # Track change - self.change_tracker.log_change(entity_type, { - "action": "delete", - "entity_id": entity_id - }) - - return OperationResult( - success=True, - metadata={"entity_type": entity_type, "action": "delete"} - ) - - async def _sync_data(self, data: Dict[str, Any]) -> OperationResult: - """Synchronize data from ERP""" - entity_type = data.get("entity_type") - since = data.get("since") - - if not entity_type: - raise ValidationError("entity_type required") - - # Get last sync if not provided - if not since: - since = self.change_tracker.get_last_sync(entity_type) - - # Query ERP for changes - endpoint = f"/{entity_type}" - params = {"since": since.isoformat()} if since else {} - - result = await self.protocol_handler.send_request(endpoint, params) - - # Map data to AITBC format - items = result.get("items", []) - mapped_items = [ - 
self.data_mapper.from_erp(entity_type, item) - for item in items - ] - - # Update last sync - self.change_tracker.update_last_sync(entity_type, datetime.utcnow()) - - return OperationResult( - success=True, - data={"items": mapped_items, "count": len(mapped_items)}, - metadata={"entity_type": entity_type, "since": since} - ) - - async def _batch_sync(self, data: Dict[str, Any]) -> OperationResult: - """Batch synchronize data""" - entity_type = data.get("entity_type") - items = data.get("items", []) - - if not entity_type or not items: - raise ValidationError("entity_type and items required") - - # Process in batches - batch_data = [{"entity_type": entity_type, "item": item} for item in items] - - results = await self.batch_processor.process_batches( - batch_data, - self._process_sync_batch - ) - - # Count successes and failures - successful = sum(1 for r in results if "error" not in r) - failed = len(results) - successful - - return OperationResult( - success=failed == 0, - data={"results": results}, - metadata={ - "entity_type": entity_type, - "total": len(items), - "successful": successful, - "failed": failed - } - ) - - async def _process_sync_batch(self, batch: List[Dict[str, Any]]) -> List[Dict[str, Any]]: - """Process a sync batch""" - entity_type = batch[0]["entity_type"] - items = [b["item"] for b in batch] - - # Map to ERP format - erp_items = [ - self.data_mapper.to_erp(entity_type, item) - for item in items - ] - - # Send batch request - endpoint = f"/{entity_type}/batch" - results = await self.protocol_handler.batch_request([ - {"method": "POST", "endpoint": endpoint, "data": item} - for item in erp_items - ]) - - return results - - async def _load_data_models(self): - """Load ERP data models""" - # Default models - override in subclasses - self.data_models = { - "customer": ERPDataModel( - entity_type="customer", - fields={"id": str, "name": str, "email": str, "phone": str} - ), - "order": ERPDataModel( - entity_type="order", - fields={"id": str, 
"customer_id": str, "items": list, "total": float} - ), - "invoice": ERPDataModel( - entity_type="invoice", - fields={"id": str, "order_id": str, "amount": float, "status": str} - ) - } - - def register_data_model(self, model: ERPDataModel): - """Register a data model""" - self.data_models[model.entity_type] = model - - def get_data_model(self, entity_type: str) -> Optional[ERPDataModel]: - """Get data model by type""" - return self.data_models.get(entity_type) - - -# Protocol handler registry decorator -def register_protocol(protocol: Protocol): - """Decorator to register protocol handlers""" - def decorator(handler_class: Type[ProtocolHandler]): - ERPConnector.register_protocol(protocol, handler_class) - return handler_class - return decorator diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/erp/netsuite.py b/enterprise-connectors/python-sdk/aitbc_enterprise/erp/netsuite.py deleted file mode 100644 index 1e44650b..00000000 --- a/enterprise-connectors/python-sdk/aitbc_enterprise/erp/netsuite.py +++ /dev/null @@ -1,19 +0,0 @@ -""" -NetSuite ERP connector for AITBC Enterprise (Placeholder) -""" - -from .base import ERPConnector, ERPSystem, Protocol - - -class NetSuiteConnector(ERPConnector): - """NetSuite ERP connector with SuiteTalk support""" - - def __init__(self, client, config, netsuite_account, netsuite_consumer_key, netsuite_consumer_secret): - # TODO: Implement NetSuite connector - raise NotImplementedError("NetSuite connector not yet implemented") - - # TODO: Implement NetSuite-specific methods - # - SuiteTalk REST API - # - SuiteTalk SOAP web services - # - OAuth authentication - # - Data mapping for NetSuite records diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/erp/oracle.py b/enterprise-connectors/python-sdk/aitbc_enterprise/erp/oracle.py deleted file mode 100644 index 469c97a0..00000000 --- a/enterprise-connectors/python-sdk/aitbc_enterprise/erp/oracle.py +++ /dev/null @@ -1,19 +0,0 @@ -""" -Oracle ERP connector for AITBC 
Enterprise (Placeholder) -""" - -from .base import ERPConnector, ERPSystem, Protocol - - -class OracleConnector(ERPConnector): - """Oracle ERP connector with REST and SOAP support""" - - def __init__(self, client, config, oracle_client_id, oracle_secret): - # TODO: Implement Oracle connector - raise NotImplementedError("Oracle connector not yet implemented") - - # TODO: Implement Oracle-specific methods - # - REST API calls - # - SOAP web services - # - Oracle authentication - # - Data mapping for Oracle modules diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/erp/sap.py b/enterprise-connectors/python-sdk/aitbc_enterprise/erp/sap.py deleted file mode 100644 index a01e610b..00000000 --- a/enterprise-connectors/python-sdk/aitbc_enterprise/erp/sap.py +++ /dev/null @@ -1,19 +0,0 @@ -""" -SAP ERP connector for AITBC Enterprise (Placeholder) -""" - -from .base import ERPConnector, ERPSystem, Protocol - - -class SAPConnector(ERPConnector): - """SAP ERP connector with IDOC and BAPI support""" - - def __init__(self, client, config, sap_client): - # TODO: Implement SAP connector - raise NotImplementedError("SAP connector not yet implemented") - - # TODO: Implement SAP-specific methods - # - IDOC processing - # - BAPI calls - # - SAP authentication - # - Data mapping for SAP structures diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/exceptions.py b/enterprise-connectors/python-sdk/aitbc_enterprise/exceptions.py deleted file mode 100644 index 529e5212..00000000 --- a/enterprise-connectors/python-sdk/aitbc_enterprise/exceptions.py +++ /dev/null @@ -1,68 +0,0 @@ -""" -Exception classes for AITBC Enterprise Connectors -""" - - -class AITBCError(Exception): - """Base exception for all AITBC errors""" - pass - - -class AuthenticationError(AITBCError): - """Raised when authentication fails""" - pass - - -class RateLimitError(AITBCError): - """Raised when rate limit is exceeded""" - def __init__(self, message: str, retry_after: int = None): - 
super().__init__(message) - self.retry_after = retry_after - - -class APIError(AITBCError): - """Raised when API request fails""" - def __init__(self, message: str, status_code: int = None, response: dict = None): - super().__init__(message) - self.status_code = status_code - self.response = response - - -class ConfigurationError(AITBCError): - """Raised when configuration is invalid""" - pass - - -class ConnectorError(AITBCError): - """Raised when connector operation fails""" - pass - - -class PaymentError(ConnectorError): - """Raised when payment operation fails""" - pass - - -class ValidationError(AITBCError): - """Raised when data validation fails""" - pass - - -class WebhookError(AITBCError): - """Raised when webhook processing fails""" - pass - - -class ERPError(ConnectorError): - """Raised when ERP operation fails""" - pass - - -class SyncError(ConnectorError): - """Raised when synchronization fails""" - pass - - -class TimeoutError(AITBCError): - """Raised when operation times out""" - pass diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/metrics.py b/enterprise-connectors/python-sdk/aitbc_enterprise/metrics.py deleted file mode 100644 index 70c2a6ed..00000000 --- a/enterprise-connectors/python-sdk/aitbc_enterprise/metrics.py +++ /dev/null @@ -1,293 +0,0 @@ -""" -Metrics collection for AITBC Enterprise Connectors -""" - -import asyncio -import time -from typing import Dict, Any, Optional, List -from collections import defaultdict, deque -from dataclasses import dataclass, asdict -from datetime import datetime, timedelta -import json - -from .core import ConnectorConfig - - -@dataclass -class MetricPoint: - """Single metric data point""" - name: str - value: float - timestamp: datetime - tags: Dict[str, str] = None - - def to_dict(self) -> Dict[str, Any]: - return { - "name": self.name, - "value": self.value, - "timestamp": self.timestamp.isoformat(), - "tags": self.tags or {} - } - - -class MetricsCollector: - """Collects and manages metrics 
for connectors""" - - def __init__(self, config: ConnectorConfig): - self.config = config - self.logger = __import__('logging').getLogger(f"aitbc.{self.__class__.__name__}") - - # Metric storage - self._counters: Dict[str, float] = defaultdict(float) - self._gauges: Dict[str, float] = {} - self._histograms: Dict[str, deque] = defaultdict(lambda: deque(maxlen=1000)) - self._timers: Dict[str, List[float]] = defaultdict(list) - - # Runtime state - self._running = False - self._flush_task = None - self._buffer: List[MetricPoint] = [] - self._buffer_size = 1000 - - # Aggregated metrics - self._request_count = 0 - self._error_count = 0 - self._total_duration = 0.0 - self._last_flush = None - - async def start(self): - """Start metrics collection""" - if self._running: - return - - self._running = True - self._last_flush = datetime.utcnow() - - # Start periodic flush task - if self.config.metrics_endpoint: - self._flush_task = asyncio.create_task(self._flush_loop()) - - self.logger.info("Metrics collection started") - - async def stop(self): - """Stop metrics collection""" - if not self._running: - return - - self._running = False - - # Cancel flush task - if self._flush_task: - self._flush_task.cancel() - try: - await self._flush_task - except asyncio.CancelledError: - pass - - # Final flush - await self._flush_metrics() - - self.logger.info("Metrics collection stopped") - - def increment(self, name: str, value: float = 1.0, tags: Dict[str, str] = None): - """Increment counter metric""" - key = self._make_key(name, tags) - self._counters[key] += value - - # Add to buffer - self._add_to_buffer(name, value, tags) - - def gauge(self, name: str, value: float, tags: Dict[str, str] = None): - """Set gauge metric""" - key = self._make_key(name, tags) - self._gauges[key] = value - - # Add to buffer - self._add_to_buffer(name, value, tags) - - def histogram(self, name: str, value: float, tags: Dict[str, str] = None): - """Add value to histogram""" - key = self._make_key(name, 
tags) - self._histograms[key].append(value) - - # Add to buffer - self._add_to_buffer(name, value, tags) - - def timer(self, name: str, duration: float, tags: Dict[str, str] = None): - """Record timing metric""" - key = self._make_key(name, tags) - self._timers[key].append(duration) - - # Keep only last 1000 timings - if len(self._timers[key]) > 1000: - self._timers[key] = self._timers[key][-1000:] - - # Add to buffer - self._add_to_buffer(f"{name}_duration", duration, tags) - - async def record_request( - self, - method: str, - path: str, - status: int, - duration: float - ): - """Record request metrics""" - # Update aggregated metrics - self._request_count += 1 - self._total_duration += duration - - if status >= 400: - self._error_count += 1 - - # Record detailed metrics - tags = { - "method": method, - "path": path, - "status": str(status) - } - - self.increment("requests_total", 1.0, tags) - self.timer("request_duration", duration, tags) - - if status >= 400: - self.increment("errors_total", 1.0, tags) - - def get_metric(self, name: str, tags: Dict[str, str] = None) -> Optional[float]: - """Get current metric value""" - key = self._make_key(name, tags) - - if key in self._counters: - return self._counters[key] - elif key in self._gauges: - return self._gauges[key] - elif key in self._histograms: - values = list(self._histograms[key]) - return sum(values) / len(values) if values else 0 - elif key in self._timers: - values = self._timers[key] - return sum(values) / len(values) if values else 0 - - return None - - def get_summary(self) -> Dict[str, Any]: - """Get metrics summary""" - return { - "requests_total": self._request_count, - "errors_total": self._error_count, - "error_rate": self._error_count / max(self._request_count, 1), - "avg_duration": self._total_duration / max(self._request_count, 1), - "last_flush": self._last_flush.isoformat() if self._last_flush else None, - "metrics_count": len(self._counters) + len(self._gauges) + len(self._histograms) + 
len(self._timers) - } - - def _make_key(self, name: str, tags: Dict[str, str] = None) -> str: - """Create metric key with tags""" - if not tags: - return name - - tag_str = ",".join(f"{k}={v}" for k, v in sorted(tags.items())) - return f"{name}[{tag_str}]" - - def _add_to_buffer(self, name: str, value: float, tags: Dict[str, str] = None): - """Add metric point to buffer""" - point = MetricPoint( - name=name, - value=value, - timestamp=datetime.utcnow(), - tags=tags - ) - - self._buffer.append(point) - - # Flush if buffer is full - if len(self._buffer) >= self._buffer_size: - asyncio.create_task(self._flush_metrics()) - - async def _flush_loop(self): - """Periodic flush loop""" - while self._running: - try: - await asyncio.sleep(60) # Flush every minute - await self._flush_metrics() - except asyncio.CancelledError: - break - except Exception as e: - self.logger.error(f"Flush loop error: {e}") - - async def _flush_metrics(self): - """Flush metrics to endpoint""" - if not self.config.metrics_endpoint or not self._buffer: - return - - try: - import aiohttp - - # Prepare metrics payload - payload = { - "timestamp": datetime.utcnow().isoformat(), - "source": "aitbc-enterprise-sdk", - "metrics": [asdict(point) for point in self._buffer] - } - - # Send to endpoint - async with aiohttp.ClientSession() as session: - async with session.post( - self.config.metrics_endpoint, - json=payload, - timeout=10 - ) as response: - if response.status == 200: - self._buffer.clear() - self._last_flush = datetime.utcnow() - self.logger.debug(f"Flushed {len(payload['metrics'])} metrics") - else: - self.logger.error(f"Failed to flush metrics: {response.status}") - - except Exception as e: - self.logger.error(f"Error flushing metrics: {e}") - - -class PerformanceTracker: - """Track performance metrics for operations""" - - def __init__(self, metrics: MetricsCollector): - self.metrics = metrics - self._operations: Dict[str, float] = {} - - def start_operation(self, operation: str): - """Start 
timing an operation""" - self._operations[operation] = time.time() - - def end_operation(self, operation: str, tags: Dict[str, str] = None): - """End timing an operation""" - if operation in self._operations: - duration = time.time() - self._operations[operation] - del self._operations[operation] - - self.metrics.timer(f"operation_{operation}", duration, tags) - - return duration - return None - - async def track_operation(self, operation: str, coro, tags: Dict[str, str] = None): - """Context manager for tracking operations""" - start = time.time() - try: - result = await coro - success = True - return result - except Exception as e: - success = False - raise - finally: - duration = time.time() - start - - metric_tags = { - "operation": operation, - "success": str(success), - **(tags or {}) - } - - self.metrics.timer(f"operation_{operation}", duration, metric_tags) - self.metrics.increment(f"operations_total", 1.0, metric_tags) diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/payments/__init__.py b/enterprise-connectors/python-sdk/aitbc_enterprise/payments/__init__.py deleted file mode 100644 index b92b9dcb..00000000 --- a/enterprise-connectors/python-sdk/aitbc_enterprise/payments/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -""" -Payment processor connectors for AITBC Enterprise -""" - -from .base import PaymentConnector, PaymentMethod, Charge, Refund, Subscription -from .stripe import StripeConnector -from .paypal import PayPalConnector -from .square import SquareConnector - -__all__ = [ - "PaymentConnector", - "PaymentMethod", - "Charge", - "Refund", - "Subscription", - "StripeConnector", - "PayPalConnector", - "SquareConnector", -] diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/payments/base.py b/enterprise-connectors/python-sdk/aitbc_enterprise/payments/base.py deleted file mode 100644 index a7df7adb..00000000 --- a/enterprise-connectors/python-sdk/aitbc_enterprise/payments/base.py +++ /dev/null @@ -1,256 +0,0 @@ -""" -Base classes 
for payment processor connectors -""" - -from abc import ABC, abstractmethod -from typing import Dict, Any, Optional, List -from datetime import datetime -from dataclasses import dataclass -from enum import Enum - - -class PaymentStatus(Enum): - """Payment status enumeration""" - PENDING = "pending" - SUCCEEDED = "succeeded" - FAILED = "failed" - REFUNDED = "refunded" - PARTIALLY_REFUNDED = "partially_refunded" - CANCELED = "canceled" - - -class RefundStatus(Enum): - """Refund status enumeration""" - PENDING = "pending" - SUCCEEDED = "succeeded" - FAILED = "failed" - CANCELED = "canceled" - - -class SubscriptionStatus(Enum): - """Subscription status enumeration""" - TRIALING = "trialing" - ACTIVE = "active" - PAST_DUE = "past_due" - CANCELED = "canceled" - UNPAID = "unpaid" - - -@dataclass -class PaymentMethod: - """Payment method representation""" - id: str - type: str - created_at: datetime - metadata: Dict[str, Any] - - # Card-specific fields - brand: Optional[str] = None - last4: Optional[str] = None - exp_month: Optional[int] = None - exp_year: Optional[int] = None - - # Bank account fields - bank_name: Optional[str] = None - last4_ach: Optional[str] = None - routing_number: Optional[str] = None - - @classmethod - def from_stripe_payment_method(cls, pm_data: Dict[str, Any]) -> 'PaymentMethod': - """Create from Stripe payment method data""" - card = pm_data.get("card", {}) - - return cls( - id=pm_data["id"], - type=pm_data["type"], - created_at=datetime.fromtimestamp(pm_data["created"]), - metadata=pm_data.get("metadata", {}), - brand=card.get("brand"), - last4=card.get("last4"), - exp_month=card.get("exp_month"), - exp_year=card.get("exp_year") - ) - - -@dataclass -class Charge: - """Charge representation""" - id: str - amount: int - currency: str - status: PaymentStatus - created_at: datetime - updated_at: datetime - description: Optional[str] - metadata: Dict[str, Any] - - # Refund information - amount_refunded: int = 0 - refunds: List[Dict[str, Any]] = None 
- - # Payment method - payment_method_id: Optional[str] = None - payment_method_details: Optional[Dict[str, Any]] = None - - def __post_init__(self): - if self.refunds is None: - self.refunds = [] - - @classmethod - def from_stripe_charge(cls, charge_data: Dict[str, Any]) -> 'Charge': - """Create from Stripe charge data""" - return cls( - id=charge_data["id"], - amount=charge_data["amount"], - currency=charge_data["currency"], - status=PaymentStatus(charge_data["status"]), - created_at=datetime.fromtimestamp(charge_data["created"]), - updated_at=datetime.fromtimestamp(charge_data.get("updated", charge_data["created"])), - description=charge_data.get("description"), - metadata=charge_data.get("metadata", {}), - amount_refunded=charge_data.get("amount_refunded", 0), - refunds=[r.to_dict() for r in charge_data.get("refunds", {}).get("data", [])], - payment_method_id=charge_data.get("payment_method"), - payment_method_details=charge_data.get("payment_method_details") - ) - - -@dataclass -class Refund: - """Refund representation""" - id: str - amount: int - currency: str - status: RefundStatus - created_at: datetime - updated_at: datetime - charge_id: str - reason: Optional[str] - metadata: Dict[str, Any] - - @classmethod - def from_stripe_refund(cls, refund_data: Dict[str, Any]) -> 'Refund': - """Create from Stripe refund data""" - return cls( - id=refund_data["id"], - amount=refund_data["amount"], - currency=refund_data["currency"], - status=RefundStatus(refund_data["status"]), - created_at=datetime.fromtimestamp(refund_data["created"]), - updated_at=datetime.fromtimestamp(refund_data.get("updated", refund_data["created"])), - charge_id=refund_data["charge"], - reason=refund_data.get("reason"), - metadata=refund_data.get("metadata", {}) - ) - - -@dataclass -class Subscription: - """Subscription representation""" - id: str - status: SubscriptionStatus - created_at: datetime - updated_at: datetime - current_period_start: datetime - current_period_end: datetime - 
customer_id: str - metadata: Dict[str, Any] - - # Pricing - amount: Optional[int] = None - currency: Optional[str] = None - interval: Optional[str] = None - interval_count: Optional[int] = None - - # Trial - trial_start: Optional[datetime] = None - trial_end: Optional[datetime] = None - - # Cancellation - canceled_at: Optional[datetime] = None - ended_at: Optional[datetime] = None - - @classmethod - def from_stripe_subscription(cls, sub_data: Dict[str, Any]) -> 'Subscription': - """Create from Stripe subscription data""" - items = sub_data.get("items", {}).get("data", []) - first_item = items[0] if items else {} - price = first_item.get("price", {}) - - return cls( - id=sub_data["id"], - status=SubscriptionStatus(sub_data["status"]), - created_at=datetime.fromtimestamp(sub_data["created"]), - updated_at=datetime.fromtimestamp(sub_data.get("updated", sub_data["created"])), - current_period_start=datetime.fromtimestamp(sub_data["current_period_start"]), - current_period_end=datetime.fromtimestamp(sub_data["current_period_end"]), - customer_id=sub_data["customer"], - metadata=sub_data.get("metadata", {}), - amount=price.get("unit_amount"), - currency=price.get("currency"), - interval=price.get("recurring", {}).get("interval"), - interval_count=price.get("recurring", {}).get("interval_count"), - trial_start=datetime.fromtimestamp(sub_data["trial_start"]) if sub_data.get("trial_start") else None, - trial_end=datetime.fromtimestamp(sub_data["trial_end"]) if sub_data.get("trial_end") else None, - canceled_at=datetime.fromtimestamp(sub_data["canceled_at"]) if sub_data.get("canceled_at") else None, - ended_at=datetime.fromtimestamp(sub_data["ended_at"]) if sub_data.get("ended_at") else None - ) - - -class PaymentConnector(ABC): - """Abstract base class for payment connectors""" - - def __init__(self, client, config): - self.client = client - self.config = config - - @abstractmethod - async def create_charge( - self, - amount: int, - currency: str, - source: str, - 
description: Optional[str] = None, - metadata: Optional[Dict[str, Any]] = None - ) -> Charge: - """Create a charge""" - pass - - @abstractmethod - async def create_refund( - self, - charge_id: str, - amount: Optional[int] = None, - reason: Optional[str] = None - ) -> Refund: - """Create a refund""" - pass - - @abstractmethod - async def create_payment_method( - self, - type: str, - card: Dict[str, Any], - metadata: Optional[Dict[str, Any]] = None - ) -> PaymentMethod: - """Create a payment method""" - pass - - @abstractmethod - async def create_subscription( - self, - customer: str, - items: List[Dict[str, Any]], - metadata: Optional[Dict[str, Any]] = None - ) -> Subscription: - """Create a subscription""" - pass - - @abstractmethod - async def cancel_subscription( - self, - subscription_id: str, - at_period_end: bool = True - ) -> Subscription: - """Cancel a subscription""" - pass diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/payments/paypal.py b/enterprise-connectors/python-sdk/aitbc_enterprise/payments/paypal.py deleted file mode 100644 index 5fe9d8d7..00000000 --- a/enterprise-connectors/python-sdk/aitbc_enterprise/payments/paypal.py +++ /dev/null @@ -1,33 +0,0 @@ -""" -PayPal payment connector for AITBC Enterprise (Placeholder) -""" - -from .base import PaymentConnector, PaymentMethod, Charge, Refund, Subscription - - -class PayPalConnector(PaymentConnector): - """PayPal payment processor connector""" - - def __init__(self, client, config, paypal_client_id, paypal_secret): - # TODO: Implement PayPal connector - raise NotImplementedError("PayPal connector not yet implemented") - - async def create_charge(self, amount, currency, source, description=None, metadata=None): - # TODO: Implement PayPal charge creation - raise NotImplementedError - - async def create_refund(self, charge_id, amount=None, reason=None): - # TODO: Implement PayPal refund - raise NotImplementedError - - async def create_payment_method(self, type, card, metadata=None): - # 
TODO: Implement PayPal payment method - raise NotImplementedError - - async def create_subscription(self, customer, items, metadata=None): - # TODO: Implement PayPal subscription - raise NotImplementedError - - async def cancel_subscription(self, subscription_id, at_period_end=True): - # TODO: Implement PayPal subscription cancellation - raise NotImplementedError diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/payments/square.py b/enterprise-connectors/python-sdk/aitbc_enterprise/payments/square.py deleted file mode 100644 index 3b7f0ea3..00000000 --- a/enterprise-connectors/python-sdk/aitbc_enterprise/payments/square.py +++ /dev/null @@ -1,33 +0,0 @@ -""" -Square payment connector for AITBC Enterprise (Placeholder) -""" - -from .base import PaymentConnector, PaymentMethod, Charge, Refund, Subscription - - -class SquareConnector(PaymentConnector): - """Square payment processor connector""" - - def __init__(self, client, config, square_access_token): - # TODO: Implement Square connector - raise NotImplementedError("Square connector not yet implemented") - - async def create_charge(self, amount, currency, source, description=None, metadata=None): - # TODO: Implement Square charge creation - raise NotImplementedError - - async def create_refund(self, charge_id, amount=None, reason=None): - # TODO: Implement Square refund - raise NotImplementedError - - async def create_payment_method(self, type, card, metadata=None): - # TODO: Implement Square payment method - raise NotImplementedError - - async def create_subscription(self, customer, items, metadata=None): - # TODO: Implement Square subscription - raise NotImplementedError - - async def cancel_subscription(self, subscription_id, at_period_end=True): - # TODO: Implement Square subscription cancellation - raise NotImplementedError diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/payments/stripe.py b/enterprise-connectors/python-sdk/aitbc_enterprise/payments/stripe.py deleted file mode 
100644 index 4d326fd2..00000000 --- a/enterprise-connectors/python-sdk/aitbc_enterprise/payments/stripe.py +++ /dev/null @@ -1,489 +0,0 @@ -""" -Stripe payment connector for AITBC Enterprise -""" - -import asyncio -import logging -from typing import Dict, Any, Optional, List -from datetime import datetime, timedelta -import stripe - -from ..base import BaseConnector, OperationResult, Transaction -from ..core import ConnectorConfig -from .base import PaymentConnector, PaymentMethod, Charge, Refund, Subscription -from ..exceptions import PaymentError, ValidationError - - -class StripeConnector(PaymentConnector): - """Stripe payment processor connector""" - - def __init__( - self, - client: 'AITBCClient', - config: ConnectorConfig, - stripe_api_key: str, - webhook_secret: Optional[str] = None - ): - super().__init__(client, config) - - # Stripe configuration - self.stripe_api_key = stripe_api_key - self.webhook_secret = webhook_secret - - # Initialize Stripe client - stripe.api_key = stripe_api_key - stripe.api_version = "2023-10-16" - - # Stripe-specific configuration - self._stripe_config = { - "api_key": stripe_api_key, - "api_version": stripe.api_version, - "connect_timeout": config.timeout, - "read_timeout": config.timeout - } - - async def _initialize(self) -> None: - """Initialize Stripe connector""" - try: - # Test Stripe connection - await self._test_stripe_connection() - - # Set up webhook handler - if self.webhook_secret: - await self._setup_webhook_handler() - - self.logger.info("Stripe connector initialized") - - except Exception as e: - raise PaymentError(f"Failed to initialize Stripe: {e}") - - async def _cleanup(self) -> None: - """Cleanup Stripe connector""" - # No specific cleanup needed for Stripe - pass - - async def _execute_operation( - self, - operation: str, - data: Dict[str, Any], - **kwargs - ) -> OperationResult: - """Execute Stripe-specific operations""" - try: - if operation == "create_charge": - return await self._create_charge(data) - 
elif operation == "create_refund": - return await self._create_refund(data) - elif operation == "create_payment_method": - return await self._create_payment_method(data) - elif operation == "create_customer": - return await self._create_customer(data) - elif operation == "create_subscription": - return await self._create_subscription(data) - elif operation == "cancel_subscription": - return await self._cancel_subscription(data) - elif operation == "retrieve_balance": - return await self._retrieve_balance() - else: - raise ValidationError(f"Unknown operation: {operation}") - - except stripe.error.StripeError as e: - self.logger.error(f"Stripe error: {e}") - return OperationResult( - success=False, - error=str(e), - metadata={"stripe_error_code": getattr(e, 'code', None)} - ) - except Exception as e: - self.logger.error(f"Operation failed: {e}") - return OperationResult( - success=False, - error=str(e) - ) - - async def create_charge( - self, - amount: int, - currency: str, - source: str, - description: Optional[str] = None, - metadata: Optional[Dict[str, Any]] = None - ) -> Charge: - """Create a charge""" - result = await self.execute_operation( - "create_charge", - { - "amount": amount, - "currency": currency, - "source": source, - "description": description, - "metadata": metadata or {} - } - ) - - if not result.success: - raise PaymentError(result.error) - - return Charge.from_stripe_charge(result.data) - - async def create_refund( - self, - charge_id: str, - amount: Optional[int] = None, - reason: Optional[str] = None - ) -> Refund: - """Create a refund""" - result = await self.execute_operation( - "create_refund", - { - "charge": charge_id, - "amount": amount, - "reason": reason - } - ) - - if not result.success: - raise PaymentError(result.error) - - return Refund.from_stripe_refund(result.data) - - async def create_payment_method( - self, - type: str, - card: Dict[str, Any], - metadata: Optional[Dict[str, Any]] = None - ) -> PaymentMethod: - """Create a 
payment method""" - result = await self.execute_operation( - "create_payment_method", - { - "type": type, - "card": card, - "metadata": metadata or {} - } - ) - - if not result.success: - raise PaymentError(result.error) - - return PaymentMethod.from_stripe_payment_method(result.data) - - async def create_subscription( - self, - customer: str, - items: List[Dict[str, Any]], - metadata: Optional[Dict[str, Any]] = None - ) -> Subscription: - """Create a subscription""" - result = await self.execute_operation( - "create_subscription", - { - "customer": customer, - "items": items, - "metadata": metadata or {} - } - ) - - if not result.success: - raise PaymentError(result.error) - - return Subscription.from_stripe_subscription(result.data) - - async def cancel_subscription( - self, - subscription_id: str, - at_period_end: bool = True - ) -> Subscription: - """Cancel a subscription""" - result = await self.execute_operation( - "cancel_subscription", - { - "subscription": subscription_id, - "at_period_end": at_period_end - } - ) - - if not result.success: - raise PaymentError(result.error) - - return Subscription.from_stripe_subscription(result.data) - - async def retrieve_balance(self) -> Dict[str, Any]: - """Retrieve account balance""" - result = await self.execute_operation("retrieve_balance", {}) - - if not result.success: - raise PaymentError(result.error) - - return result.data - - async def verify_webhook(self, payload: bytes, signature: str) -> bool: - """Verify Stripe webhook signature""" - try: - stripe.WebhookSignature.verify_header( - payload, - signature, - self.webhook_secret, - 300 - ) - return True - except stripe.error.SignatureVerificationError: - return False - - async def handle_webhook(self, payload: bytes) -> Dict[str, Any]: - """Handle Stripe webhook""" - try: - event = stripe.Webhook.construct_event( - payload, - None, # Already verified - self.webhook_secret, - 300 - ) - - # Process event based on type - result = await 
self._process_webhook_event(event) - - return { - "processed": True, - "event_type": event.type, - "event_id": event.id, - "result": result - } - - except Exception as e: - self.logger.error(f"Webhook processing failed: {e}") - return { - "processed": False, - "error": str(e) - } - - # Private methods - - async def _test_stripe_connection(self): - """Test Stripe API connection""" - try: - # Use asyncio to run in thread - loop = asyncio.get_event_loop() - await loop.run_in_executor(None, stripe.Balance.retrieve) - except Exception as e: - raise PaymentError(f"Stripe connection test failed: {e}") - - async def _setup_webhook_handler(self): - """Setup webhook handler""" - # Register webhook verification with base connector - self.add_operation_handler("webhook.verified", self._handle_verified_webhook) - - async def _create_charge(self, data: Dict[str, Any]) -> OperationResult: - """Create Stripe charge""" - loop = asyncio.get_event_loop() - - try: - charge = await loop.run_in_executor( - None, - lambda: stripe.Charge.create(**data) - ) - - return OperationResult( - success=True, - data=charge.to_dict(), - metadata={"charge_id": charge.id} - ) - - except Exception as e: - raise PaymentError(f"Failed to create charge: {e}") - - async def _create_refund(self, data: Dict[str, Any]) -> OperationResult: - """Create Stripe refund""" - loop = asyncio.get_event_loop() - - try: - refund = await loop.run_in_executor( - None, - lambda: stripe.Refund.create(**data) - ) - - return OperationResult( - success=True, - data=refund.to_dict(), - metadata={"refund_id": refund.id} - ) - - except Exception as e: - raise PaymentError(f"Failed to create refund: {e}") - - async def _create_payment_method(self, data: Dict[str, Any]) -> OperationResult: - """Create Stripe payment method""" - loop = asyncio.get_event_loop() - - try: - pm = await loop.run_in_executor( - None, - lambda: stripe.PaymentMethod.create(**data) - ) - - return OperationResult( - success=True, - data=pm.to_dict(), - 
metadata={"payment_method_id": pm.id} - ) - - except Exception as e: - raise PaymentError(f"Failed to create payment method: {e}") - - async def _create_customer(self, data: Dict[str, Any]) -> OperationResult: - """Create Stripe customer""" - loop = asyncio.get_event_loop() - - try: - customer = await loop.run_in_executor( - None, - lambda: stripe.Customer.create(**data) - ) - - return OperationResult( - success=True, - data=customer.to_dict(), - metadata={"customer_id": customer.id} - ) - - except Exception as e: - raise PaymentError(f"Failed to create customer: {e}") - - async def _create_subscription(self, data: Dict[str, Any]) -> OperationResult: - """Create Stripe subscription""" - loop = asyncio.get_event_loop() - - try: - subscription = await loop.run_in_executor( - None, - lambda: stripe.Subscription.create(**data) - ) - - return OperationResult( - success=True, - data=subscription.to_dict(), - metadata={"subscription_id": subscription.id} - ) - - except Exception as e: - raise PaymentError(f"Failed to create subscription: {e}") - - async def _cancel_subscription(self, data: Dict[str, Any]) -> OperationResult: - """Cancel Stripe subscription""" - loop = asyncio.get_event_loop() - - try: - subscription = await loop.run_in_executor( - None, - lambda: stripe.Subscription.retrieve(data["subscription"]) - ) - - subscription = await loop.run_in_executor( - None, - lambda: subscription.cancel(at_period_end=data.get("at_period_end", True)) - ) - - return OperationResult( - success=True, - data=subscription.to_dict(), - metadata={"subscription_id": subscription.id} - ) - - except Exception as e: - raise PaymentError(f"Failed to cancel subscription: {e}") - - async def _retrieve_balance(self) -> OperationResult: - """Retrieve Stripe balance""" - loop = asyncio.get_event_loop() - - try: - balance = await loop.run_in_executor(None, stripe.Balance.retrieve) - - return OperationResult( - success=True, - data=balance.to_dict() - ) - - except Exception as e: - raise 
PaymentError(f"Failed to retrieve balance: {e}") - - async def _process_webhook_event(self, event) -> Dict[str, Any]: - """Process webhook event""" - event_type = event.type - - if event_type.startswith("charge."): - return await self._handle_charge_event(event) - elif event_type.startswith("payment_method."): - return await self._handle_payment_method_event(event) - elif event_type.startswith("customer."): - return await self._handle_customer_event(event) - elif event_type.startswith("invoice."): - return await self._handle_invoice_event(event) - else: - self.logger.info(f"Unhandled webhook event type: {event_type}") - return {"status": "ignored"} - - async def _handle_charge_event(self, event) -> Dict[str, Any]: - """Handle charge-related webhook events""" - charge = event.data.object - - # Emit to AITBC - await self.client.post( - "/webhooks/stripe/charge", - json={ - "event_id": event.id, - "event_type": event.type, - "charge": charge.to_dict() - } - ) - - return {"status": "processed", "charge_id": charge.id} - - async def _handle_payment_method_event(self, event) -> Dict[str, Any]: - """Handle payment method webhook events""" - pm = event.data.object - - await self.client.post( - "/webhooks/stripe/payment_method", - json={ - "event_id": event.id, - "event_type": event.type, - "payment_method": pm.to_dict() - } - ) - - return {"status": "processed", "payment_method_id": pm.id} - - async def _handle_customer_event(self, event) -> Dict[str, Any]: - """Handle customer webhook events""" - customer = event.data.object - - await self.client.post( - "/webhooks/stripe/customer", - json={ - "event_id": event.id, - "event_type": event.type, - "customer": customer.to_dict() - } - ) - - return {"status": "processed", "customer_id": customer.id} - - async def _handle_invoice_event(self, event) -> Dict[str, Any]: - """Handle invoice webhook events""" - invoice = event.data.object - - await self.client.post( - "/webhooks/stripe/invoice", - json={ - "event_id": event.id, - 
"event_type": event.type, - "invoice": invoice.to_dict() - } - ) - - return {"status": "processed", "invoice_id": invoice.id} - - async def _handle_verified_webhook(self, data: Dict[str, Any]): - """Handle verified webhook""" - self.logger.info(f"Webhook verified: {data}") diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/rate_limiter.py b/enterprise-connectors/python-sdk/aitbc_enterprise/rate_limiter.py deleted file mode 100644 index e7c96b37..00000000 --- a/enterprise-connectors/python-sdk/aitbc_enterprise/rate_limiter.py +++ /dev/null @@ -1,189 +0,0 @@ -""" -Rate limiting for AITBC Enterprise Connectors -""" - -import asyncio -import time -from typing import Optional, Dict, Any -from collections import deque -from dataclasses import dataclass - -from .core import ConnectorConfig -from .exceptions import RateLimitError - - -@dataclass -class RateLimitInfo: - """Rate limit information""" - limit: int - remaining: int - reset_time: float - retry_after: Optional[int] = None - - -class TokenBucket: - """Token bucket rate limiter""" - - def __init__(self, rate: float, capacity: int): - self.rate = rate # Tokens per second - self.capacity = capacity - self.tokens = capacity - self.last_refill = time.time() - self._lock = asyncio.Lock() - - async def acquire(self, tokens: int = 1) -> bool: - """Acquire tokens from bucket""" - async with self._lock: - now = time.time() - - # Refill tokens - elapsed = now - self.last_refill - self.tokens = min(self.capacity, self.tokens + elapsed * self.rate) - self.last_refill = now - - # Check if enough tokens - if self.tokens >= tokens: - self.tokens -= tokens - return True - - return False - - async def wait_for_token(self, tokens: int = 1): - """Wait until token is available""" - while not await self.acquire(tokens): - # Calculate wait time - wait_time = (tokens - self.tokens) / self.rate - await asyncio.sleep(wait_time) - - -class SlidingWindowCounter: - """Sliding window rate limiter""" - - def __init__(self, limit: 
int, window: int): - self.limit = limit - self.window = window # Window size in seconds - self.requests = deque() - self._lock = asyncio.Lock() - - async def is_allowed(self) -> bool: - """Check if request is allowed""" - async with self._lock: - now = time.time() - - # Remove old requests - while self.requests and self.requests[0] <= now - self.window: - self.requests.popleft() - - # Check if under limit - if len(self.requests) < self.limit: - self.requests.append(now) - return True - - return False - - async def wait_for_slot(self): - """Wait until request slot is available""" - while not await self.is_allowed(): - # Calculate wait time until oldest request expires - if self.requests: - wait_time = self.requests[0] + self.window - time.time() - if wait_time > 0: - await asyncio.sleep(wait_time) - - -class RateLimiter: - """Rate limiter with multiple strategies""" - - def __init__(self, config: ConnectorConfig): - self.config = config - self.logger = __import__('logging').getLogger(f"aitbc.{self.__class__.__name__}") - - # Initialize rate limiters - self._token_bucket = None - self._sliding_window = None - self._strategy = "token_bucket" - - if config.rate_limit: - # Default to token bucket with burst capacity - burst = config.burst_limit or config.rate_limit * 2 - self._token_bucket = TokenBucket( - rate=config.rate_limit, - capacity=burst - ) - - # Track rate limit info from server - self._server_limits: Dict[str, RateLimitInfo] = {} - - async def acquire(self, endpoint: str = None) -> None: - """Acquire rate limit permit""" - if self._strategy == "token_bucket" and self._token_bucket: - await self._token_bucket.wait_for_token() - elif self._strategy == "sliding_window" and self._sliding_window: - await self._sliding_window.wait_for_slot() - - # Check server-side limits - if endpoint and endpoint in self._server_limits: - limit_info = self._server_limits[endpoint] - - if limit_info.remaining <= 0: - wait_time = limit_info.reset_time - time.time() - if wait_time 
> 0: - raise RateLimitError( - f"Rate limit exceeded for {endpoint}", - retry_after=int(wait_time) + 1 - ) - - def update_server_limit(self, endpoint: str, headers: Dict[str, str]): - """Update rate limit info from server response""" - # Parse common rate limit headers - limit = headers.get("X-RateLimit-Limit") - remaining = headers.get("X-RateLimit-Remaining") - reset = headers.get("X-RateLimit-Reset") - retry_after = headers.get("Retry-After") - - if limit or remaining or reset: - self._server_limits[endpoint] = RateLimitInfo( - limit=int(limit) if limit else 0, - remaining=int(remaining) if remaining else 0, - reset_time=float(reset) if reset else time.time() + 3600, - retry_after=int(retry_after) if retry_after else None - ) - - self.logger.debug( - f"Updated rate limit for {endpoint}: " - f"{remaining}/{limit} remaining" - ) - - def get_limit_info(self, endpoint: str = None) -> Optional[RateLimitInfo]: - """Get current rate limit info""" - if endpoint and endpoint in self._server_limits: - return self._server_limits[endpoint] - - # Return configured limit if no server limit - if self.config.rate_limit: - return RateLimitInfo( - limit=self.config.rate_limit, - remaining=self.config.rate_limit, # Approximate - reset_time=time.time() + 3600 - ) - - return None - - def set_strategy(self, strategy: str): - """Set rate limiting strategy""" - if strategy not in ["token_bucket", "sliding_window", "none"]: - raise ValueError(f"Unknown strategy: {strategy}") - - self._strategy = strategy - - def reset(self): - """Reset rate limiter state""" - if self._token_bucket: - self._token_bucket.tokens = self._token_bucket.capacity - self._token_bucket.last_refill = time.time() - - if self._sliding_window: - self._sliding_window.requests.clear() - - self._server_limits.clear() - self.logger.info("Rate limiter reset") diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/validators.py b/enterprise-connectors/python-sdk/aitbc_enterprise/validators.py deleted file mode 
100644 index b34bd70e..00000000 --- a/enterprise-connectors/python-sdk/aitbc_enterprise/validators.py +++ /dev/null @@ -1,318 +0,0 @@ -""" -Validation utilities for AITBC Enterprise Connectors -""" - -import re -from abc import ABC, abstractmethod -from typing import Dict, Any, List, Optional, Union -from dataclasses import dataclass -from datetime import datetime - -from .exceptions import ValidationError - - -@dataclass -class ValidationRule: - """Validation rule definition""" - name: str - required: bool = True - type: type = str - min_length: Optional[int] = None - max_length: Optional[int] = None - pattern: Optional[str] = None - min_value: Optional[Union[int, float]] = None - max_value: Optional[Union[int, float]] = None - allowed_values: Optional[List[Any]] = None - custom_validator: Optional[callable] = None - - -class BaseValidator(ABC): - """Abstract base class for validators""" - - @abstractmethod - async def validate(self, operation: str, data: Dict[str, Any]) -> bool: - """Validate operation data""" - pass - - -class SchemaValidator(BaseValidator): - """Schema-based validator""" - - def __init__(self, schemas: Dict[str, Dict[str, ValidationRule]]): - self.schemas = schemas - self.logger = __import__('logging').getLogger(f"aitbc.{self.__class__.__name__}") - - async def validate(self, operation: str, data: Dict[str, Any]) -> bool: - """Validate data against schema""" - if operation not in self.schemas: - self.logger.warning(f"No schema for operation: {operation}") - return True - - schema = self.schemas[operation] - errors = [] - - # Validate each field - for field_name, rule in schema.items(): - try: - self._validate_field(field_name, data.get(field_name), rule) - except ValidationError as e: - errors.append(f"{field_name}: {str(e)}") - - # Check for unexpected fields - allowed_fields = set(schema.keys()) - provided_fields = set(data.keys()) - unexpected = provided_fields - allowed_fields - - if unexpected: - self.logger.warning(f"Unexpected fields: 
{unexpected}") - - if errors: - raise ValidationError(f"Validation failed: {'; '.join(errors)}") - - return True - - def _validate_field(self, name: str, value: Any, rule: ValidationRule): - """Validate a single field""" - # Check required - if rule.required and value is None: - raise ValidationError(f"{name} is required") - - # Skip validation if not required and value is None - if not rule.required and value is None: - return - - # Type validation - if not isinstance(value, rule.type): - try: - value = rule.type(value) - except (ValueError, TypeError): - raise ValidationError(f"{name} must be of type {rule.type.__name__}") - - # String validations - if isinstance(value, str): - if rule.min_length and len(value) < rule.min_length: - raise ValidationError(f"{name} must be at least {rule.min_length} characters") - - if rule.max_length and len(value) > rule.max_length: - raise ValidationError(f"{name} must be at most {rule.max_length} characters") - - if rule.pattern and not re.match(rule.pattern, value): - raise ValidationError(f"{name} does not match required pattern") - - # Numeric validations - if isinstance(value, (int, float)): - if rule.min_value is not None and value < rule.min_value: - raise ValidationError(f"{name} must be at least {rule.min_value}") - - if rule.max_value is not None and value > rule.max_value: - raise ValidationError(f"{name} must be at most {rule.max_value}") - - # Allowed values - if rule.allowed_values and value not in rule.allowed_values: - raise ValidationError(f"{name} must be one of: {rule.allowed_values}") - - # Custom validator - if rule.custom_validator: - try: - if not rule.custom_validator(value): - raise ValidationError(f"{name} failed custom validation") - except Exception as e: - raise ValidationError(f"{name} validation error: {str(e)}") - - -class PaymentValidator(SchemaValidator): - """Validator for payment operations""" - - def __init__(self): - schemas = { - "create_charge": { - "amount": ValidationRule( - 
name="amount", - type=int, - min_value=50, # Minimum $0.50 - max_value=99999999, # Maximum $999,999.99 - custom_validator=lambda x: x % 1 == 0 # Must be whole cents - ), - "currency": ValidationRule( - name="currency", - type=str, - min_length=3, - max_length=3, - pattern=r"^[A-Z]{3}$", - allowed_values=["USD", "EUR", "GBP", "JPY", "CAD", "AUD"] - ), - "source": ValidationRule( - name="source", - type=str, - min_length=1, - max_length=255 - ), - "description": ValidationRule( - name="description", - type=str, - required=False, - max_length=1000 - ) - }, - "create_refund": { - "charge": ValidationRule( - name="charge", - type=str, - min_length=1, - pattern=r"^ch_[a-zA-Z0-9]+$" - ), - "amount": ValidationRule( - name="amount", - type=int, - required=False, - min_value=50, - custom_validator=lambda x: x % 1 == 0 - ), - "reason": ValidationRule( - name="reason", - type=str, - required=False, - allowed_values=["duplicate", "fraudulent", "requested_by_customer"] - ) - }, - "create_payment_method": { - "type": ValidationRule( - name="type", - type=str, - allowed_values=["card", "bank_account"] - ), - "card": ValidationRule( - name="card", - type=dict, - custom_validator=lambda x: all(k in x for k in ["number", "exp_month", "exp_year"]) - ) - } - } - - super().__init__(schemas) - - -class ERPValidator(SchemaValidator): - """Validator for ERP operations""" - - def __init__(self): - schemas = { - "create_customer": { - "name": ValidationRule( - name="name", - type=str, - min_length=1, - max_length=100 - ), - "email": ValidationRule( - name="email", - type=str, - pattern=r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$" - ), - "phone": ValidationRule( - name="phone", - type=str, - required=False, - pattern=r"^\+?[1-9]\d{1,14}$" - ), - "address": ValidationRule( - name="address", - type=dict, - required=False - ) - }, - "create_order": { - "customer_id": ValidationRule( - name="customer_id", - type=str, - min_length=1 - ), - "items": ValidationRule( - name="items", - 
type=list, - min_length=1, - custom_validator=lambda x: all(isinstance(i, dict) and "product_id" in i and "quantity" in i for i in x) - ), - "currency": ValidationRule( - name="currency", - type=str, - pattern=r"^[A-Z]{3}$" - ) - }, - "sync_data": { - "entity_type": ValidationRule( - name="entity_type", - type=str, - allowed_values=["customers", "orders", "products", "invoices"] - ), - "since": ValidationRule( - name="since", - type=str, - required=False, - pattern=r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z$" - ), - "limit": ValidationRule( - name="limit", - type=int, - required=False, - min_value=1, - max_value=1000 - ) - } - } - - super().__init__(schemas) - - -class CompositeValidator(BaseValidator): - """Combines multiple validators""" - - def __init__(self, validators: List[BaseValidator]): - self.validators = validators - - async def validate(self, operation: str, data: Dict[str, Any]) -> bool: - """Run all validators""" - errors = [] - - for validator in self.validators: - try: - await validator.validate(operation, data) - except ValidationError as e: - errors.append(str(e)) - - if errors: - raise ValidationError(f"Validation failed: {'; '.join(errors)}") - - return True - - -# Common validation functions -def validate_email(email: str) -> bool: - """Validate email address""" - pattern = r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$" - return re.match(pattern, email) is not None - - -def validate_phone(phone: str) -> bool: - """Validate phone number (E.164 format)""" - pattern = r"^\+?[1-9]\d{1,14}$" - return re.match(pattern, phone) is not None - - -def validate_amount(amount: int) -> bool: - """Validate amount in cents""" - return amount > 0 and amount % 1 == 0 - - -def validate_currency(currency: str) -> bool: - """Validate currency code""" - return len(currency) == 3 and currency.isupper() - - -def validate_timestamp(timestamp: str) -> bool: - """Validate ISO 8601 timestamp""" - try: - datetime.fromisoformat(timestamp.replace('Z', '+00:00')) - return 
True - except ValueError: - return False diff --git a/enterprise-connectors/python-sdk/aitbc_enterprise/webhooks.py b/enterprise-connectors/python-sdk/aitbc_enterprise/webhooks.py deleted file mode 100644 index a621f906..00000000 --- a/enterprise-connectors/python-sdk/aitbc_enterprise/webhooks.py +++ /dev/null @@ -1,309 +0,0 @@ -""" -Webhook handling for AITBC Enterprise Connectors -""" - -import hashlib -import hmac -import json -import asyncio -from typing import Dict, Any, Optional, Callable, List, Awaitable -from datetime import datetime -from dataclasses import dataclass - -from .exceptions import WebhookError - - -@dataclass -class WebhookEvent: - """Webhook event representation""" - id: str - type: str - source: str - timestamp: datetime - data: Dict[str, Any] - signature: Optional[str] = None - - def to_dict(self) -> Dict[str, Any]: - return { - "id": self.id, - "type": self.type, - "source": self.source, - "timestamp": self.timestamp.isoformat(), - "data": self.data, - "signature": self.signature - } - - -class WebhookHandler: - """Handles webhook processing and verification""" - - def __init__(self, secret: str = None): - self.secret = secret - self.logger = __import__('logging').getLogger(f"aitbc.{self.__class__.__name__}") - - # Event handlers - self._handlers: Dict[str, List[Callable]] = {} - - # Processing state - self._processing = False - self._queue: asyncio.Queue = None - self._worker_task = None - - async def setup(self, endpoint: str, secret: str = None): - """Setup webhook handler""" - if secret: - self.secret = secret - - # Initialize queue and worker - self._queue = asyncio.Queue(maxsize=1000) - self._worker_task = asyncio.create_task(self._process_queue()) - - self.logger.info(f"Webhook handler setup for endpoint: {endpoint}") - - async def cleanup(self): - """Cleanup webhook handler""" - if self._worker_task: - self._worker_task.cancel() - try: - await self._worker_task - except asyncio.CancelledError: - pass - - self.logger.info("Webhook 
handler cleaned up") - - def add_handler(self, event_type: str, handler: Callable[[WebhookEvent], Awaitable[None]]): - """Add handler for specific event type""" - if event_type not in self._handlers: - self._handlers[event_type] = [] - self._handlers[event_type].append(handler) - - def remove_handler(self, event_type: str, handler: Callable): - """Remove handler for specific event type""" - if event_type in self._handlers: - try: - self._handlers[event_type].remove(handler) - except ValueError: - pass - - async def verify(self, payload: bytes, signature: str, algorithm: str = "sha256") -> bool: - """Verify webhook signature""" - if not self.secret: - self.logger.warning("No webhook secret configured, skipping verification") - return True - - try: - expected_signature = hmac.new( - self.secret.encode(), - payload, - getattr(hashlib, algorithm) - ).hexdigest() - - # Compare signatures securely - return hmac.compare_digest(expected_signature, signature) - - except Exception as e: - self.logger.error(f"Webhook verification failed: {e}") - return False - - async def handle(self, payload: bytes, signature: str = None) -> Dict[str, Any]: - """Handle incoming webhook""" - try: - # Parse payload - data = json.loads(payload.decode()) - - # Create event - event = WebhookEvent( - id=data.get("id", f"evt_{int(datetime.utcnow().timestamp())}"), - type=data.get("type", "unknown"), - source=data.get("source", "unknown"), - timestamp=datetime.fromisoformat(data.get("timestamp", datetime.utcnow().isoformat())), - data=data.get("data", {}), - signature=signature - ) - - # Verify signature if provided - if signature and not await self.verify(payload, signature): - raise WebhookError("Invalid webhook signature") - - # Queue for processing - if self._queue: - await self._queue.put(event) - return { - "status": "queued", - "event_id": event.id - } - else: - # Process immediately - result = await self._process_event(event) - return result - - except json.JSONDecodeError as e: - raise 
WebhookError(f"Invalid JSON payload: {e}") - except Exception as e: - self.logger.error(f"Webhook handling failed: {e}") - raise WebhookError(f"Processing failed: {e}") - - async def _process_queue(self): - """Process webhook events from queue""" - while True: - try: - event = await self._queue.get() - await self._process_event(event) - self._queue.task_done() - except asyncio.CancelledError: - break - except Exception as e: - self.logger.error(f"Error processing webhook event: {e}") - - async def _process_event(self, event: WebhookEvent) -> Dict[str, Any]: - """Process a single webhook event""" - try: - self.logger.debug(f"Processing webhook event: {event.type}") - - # Get handlers for event type - handlers = self._handlers.get(event.type, []) - - # Also check for wildcard handlers - wildcard_handlers = self._handlers.get("*", []) - handlers.extend(wildcard_handlers) - - if not handlers: - self.logger.warning(f"No handlers for event type: {event.type}") - return { - "status": "ignored", - "event_id": event.id, - "message": "No handlers registered" - } - - # Execute handlers - tasks = [] - for handler in handlers: - tasks.append(handler(event)) - - # Wait for all handlers to complete - results = await asyncio.gather(*tasks, return_exceptions=True) - - # Check for errors - errors = [] - for i, result in enumerate(results): - if isinstance(result, Exception): - errors.append(str(result)) - self.logger.error(f"Handler {i} failed: {result}") - - return { - "status": "processed" if not errors else "partial", - "event_id": event.id, - "handlers_count": len(handlers), - "errors_count": len(errors), - "errors": errors if errors else None - } - - except Exception as e: - self.logger.error(f"Failed to process webhook event: {e}") - return { - "status": "failed", - "event_id": event.id, - "error": str(e) - } - - -class StripeWebhookHandler(WebhookHandler): - """Stripe-specific webhook handler""" - - def __init__(self, secret: str): - super().__init__(secret) - 
self._setup_default_handlers() - - def _setup_default_handlers(self): - """Setup default Stripe event handlers""" - self.add_handler("charge.succeeded", self._handle_charge_succeeded) - self.add_handler("charge.failed", self._handle_charge_failed) - self.add_handler("payment_method.attached", self._handle_payment_method_attached) - self.add_handler("invoice.payment_succeeded", self._handle_invoice_succeeded) - - async def verify(self, payload: bytes, signature: str) -> bool: - """Verify Stripe webhook signature""" - try: - import stripe - - stripe.WebhookSignature.verify_header( - payload, - signature, - self.secret, - 300 # 5 minutes tolerance - ) - return True - - except Exception as e: - self.logger.error(f"Stripe webhook verification failed: {e}") - return False - - async def _handle_charge_succeeded(self, event: WebhookEvent): - """Handle successful charge""" - charge = event.data.get("object", {}) - self.logger.info(f"Charge succeeded: {charge.get('id')} - ${charge.get('amount', 0) / 100:.2f}") - - async def _handle_charge_failed(self, event: WebhookEvent): - """Handle failed charge""" - charge = event.data.get("object", {}) - self.logger.warning(f"Charge failed: {charge.get('id')} - {charge.get('failure_message')}") - - async def _handle_payment_method_attached(self, event: WebhookEvent): - """Handle payment method attachment""" - pm = event.data.get("object", {}) - self.logger.info(f"Payment method attached: {pm.get('id')} - {pm.get('type')}") - - async def _handle_invoice_succeeded(self, event: WebhookEvent): - """Handle successful invoice payment""" - invoice = event.data.get("object", {}) - self.logger.info(f"Invoice paid: {invoice.get('id')} - ${invoice.get('amount_paid', 0) / 100:.2f}") - - -class WebhookServer: - """Simple webhook server for testing""" - - def __init__(self, handler: WebhookHandler, port: int = 8080): - self.handler = handler - self.port = port - self.server = None - self.logger = 
__import__('logging').getLogger(f"aitbc.{self.__class__.__name__}") - - async def start(self): - """Start webhook server""" - from aiohttp import web - - async def handle_webhook(request): - # Get signature from header - signature = request.headers.get("Stripe-Signature") or request.headers.get("X-Signature") - - # Read payload - payload = await request.read() - - try: - # Handle webhook - result = await self.handler.handle(payload, signature) - return web.json_response(result) - except WebhookError as e: - return web.json_response( - {"error": str(e)}, - status=400 - ) - - # Create app - app = web.Application() - app.router.add_post("/webhook", handle_webhook) - - # Start server - runner = web.AppRunner(app) - await runner.setup() - site = web.TCPSite(runner, "localhost", self.port) - await site.start() - - self.server = runner - self.logger.info(f"Webhook server started on port {self.port}") - - async def stop(self): - """Stop webhook server""" - if self.server: - await self.server.cleanup() - self.logger.info("Webhook server stopped") diff --git a/enterprise-connectors/python-sdk/docs/README.md b/enterprise-connectors/python-sdk/docs/README.md deleted file mode 100644 index 9b2c72d5..00000000 --- a/enterprise-connectors/python-sdk/docs/README.md +++ /dev/null @@ -1,270 +0,0 @@ -# AITBC Enterprise Connectors SDK - -Python SDK for integrating AITBC with enterprise systems including payment processors, ERP systems, and other business applications. 
- -## Quick Start - -### Installation - -```bash -pip install aitbc-enterprise -``` - -### Basic Usage - -```python -import asyncio -from aitbc_enterprise import AITBCClient, ConnectorConfig -from aitbc_enterprise.payments import StripeConnector - -async def main(): - # Configure AITBC client - config = ConnectorConfig( - base_url="https://api.aitbc.io", - api_key="your-api-key", - enterprise_id="enterprise-123" - ) - - # Create client and connector - async with AITBCClient(config) as client: - stripe = StripeConnector( - client=client, - config=config, - stripe_api_key="sk_test_your-stripe-key" - ) - - await stripe.initialize() - - # Create a charge - charge = await stripe.create_charge( - amount=2000, # $20.00 - currency="usd", - source="pm_card_visa", - description="AITBC service" - ) - - print(f"Charge created: {charge.id}") - - await stripe.cleanup() - -asyncio.run(main()) -``` - -## Features - -- **Async/Await Support**: Full async implementation for high performance -- **Enterprise Ready**: Built-in rate limiting, metrics, and error handling -- **Extensible**: Plugin architecture for custom connectors -- **Secure**: HSM-backed key management and audit logging -- **Compliant**: GDPR, SOC 2, and PCI DSS compliant - -## Supported Systems - -### Payment Processors -- ✅ Stripe -- ⏳ PayPal (Coming soon) -- ⏳ Square (Coming soon) - -### ERP Systems -- ⏳ SAP (IDOC/BAPI) -- ⏳ Oracle (REST/SOAP) -- ⏳ NetSuite (SuiteTalk) - -## Architecture - -The SDK uses a modular architecture with dependency injection: - -``` -AITBCClient -├── Core Components -│ ├── AuthHandler (Bearer, OAuth2, HMAC, etc.) 
-│ ├── RateLimiter (Token bucket, Sliding window) -│ ├── MetricsCollector (Performance tracking) -│ └── WebhookHandler (Event processing) -├── BaseConnector -│ ├── Validation -│ ├── Error Handling -│ ├── Batch Operations -│ └── Event Handlers -└── Specific Connectors - ├── PaymentConnector - └── ERPConnector -``` - -## Configuration - -### Basic Configuration - -```python -config = ConnectorConfig( - base_url="https://api.aitbc.io", - api_key="your-api-key", - timeout=30.0, - max_retries=3 -) -``` - -### Enterprise Features - -```python -config = ConnectorConfig( - base_url="https://api.aitbc.io", - api_key="your-api-key", - enterprise_id="enterprise-123", - tenant_id="tenant-456", - region="us-east-1", - rate_limit=100, # requests per second - enable_metrics=True, - webhook_secret="whsec_your-secret" -) -``` - -### Authentication - -The SDK supports multiple authentication methods: - -```python -# Bearer token (default) -config = ConnectorConfig( - auth_type="bearer", - api_key="your-token" -) - -# OAuth 2.0 -config = ConnectorConfig( - auth_type="oauth2", - auth_config={ - "client_id": "your-client-id", - "client_secret": "your-secret", - "token_url": "https://oauth.example.com/token" - } -) - -# HMAC signature -config = ConnectorConfig( - auth_type="hmac", - api_key="your-key", - auth_config={ - "secret": "your-secret", - "algorithm": "sha256" - } -) -``` - -## Error Handling - -The SDK provides comprehensive error handling: - -```python -from aitbc_enterprise.exceptions import ( - AITBCError, - AuthenticationError, - RateLimitError, - PaymentError, - ValidationError -) - -try: - charge = await stripe.create_charge(...) 
-except RateLimitError as e: - print(f"Rate limited, retry after {e.retry_after}s") -except PaymentError as e: - print(f"Payment failed: {e}") -except AITBCError as e: - print(f"AITBC error: {e}") -``` - -## Webhooks - -Handle webhooks with built-in verification: - -```python -from aitbc_enterprise.webhooks import StripeWebhookHandler - -# Create webhook handler -webhook_handler = StripeWebhookHandler( - secret="whsec_your-webhook-secret" -) - -# Add custom handler -async def handle_charge(event): - print(f"Charge: {event.data}") - -webhook_handler.add_handler("charge.succeeded", handle_charge) - -# Process webhook -result = await webhook_handler.handle(payload, signature) -``` - -## Batch Operations - -Process multiple operations efficiently: - -```python -# Batch charges -operations = [ - { - "operation": "create_charge", - "data": {"amount": 1000, "currency": "usd", "source": "pm_123"} - }, - { - "operation": "create_charge", - "data": {"amount": 2000, "currency": "usd", "source": "pm_456"} - } -] - -results = await stripe.batch_execute(operations) -successful = sum(1 for r in results if r.success) -``` - -## Metrics and Monitoring - -Enable metrics collection: - -```python -config = ConnectorConfig( - enable_metrics=True, - metrics_endpoint="https://your-metrics.example.com" -) - -# Metrics are automatically collected -# Access metrics summary -print(stripe.metrics) -``` - -## Testing - -Use the test mode for development: - -```python -# Use test API keys -config = ConnectorConfig( - base_url="https://api-test.aitbc.io", - api_key="test-key" -) - -stripe = StripeConnector( - client=client, - config=config, - stripe_api_key="sk_test_key" # Stripe test key -) -``` - -## Examples - -See the `examples/` directory for complete examples: - -- `stripe_example.py` - Payment processing -- `webhook_example.py` - Webhook handling -- `enterprise_example.py` - Enterprise features - -## Support - -- **Documentation**: https://docs.aitbc.io/enterprise-sdk -- **Issues**: 
https://github.com/aitbc/enterprise-sdk/issues -- **Support**: enterprise@aitbc.io -- **Security**: security@aitbc.io - -## License - -Copyright © 2024 AITBC. All rights reserved. diff --git a/enterprise-connectors/python-sdk/docs/api-specification.md b/enterprise-connectors/python-sdk/docs/api-specification.md deleted file mode 100644 index b83f9184..00000000 --- a/enterprise-connectors/python-sdk/docs/api-specification.md +++ /dev/null @@ -1,598 +0,0 @@ -# AITBC Enterprise Connectors API Specification - -## Overview - -This document describes the API specification for the AITBC Enterprise Connectors SDK, including all available methods, parameters, and response formats. - -## Core API - -### AITBCClient - -The main client class for connecting to AITBC. - -#### Constructor - -```python -AITBCClient( - config: ConnectorConfig, - session: Optional[ClientSession] = None, - auth_handler: Optional[AuthHandler] = None, - rate_limiter: Optional[RateLimiter] = None, - metrics: Optional[MetricsCollector] = None -) -``` - -#### Methods - -##### connect() -Establish connection to AITBC. - -```python -async connect() -> None -``` - -##### disconnect() -Close connection to AITBC. - -```python -async disconnect() -> None -``` - -##### request() -Make authenticated request to AITBC API. - -```python -async request( - method: str, - path: str, - **kwargs -) -> Dict[str, Any] -``` - -**Parameters:** -- `method` (str): HTTP method (GET, POST, PUT, DELETE) -- `path` (str): API endpoint path -- `**kwargs`: Additional request parameters - -**Returns:** -- `Dict[str, Any]`: Response data - -##### get(), post(), put(), delete() -Convenience methods for HTTP requests. - -```python -async get(path: str, **kwargs) -> Dict[str, Any] -async post(path: str, **kwargs) -> Dict[str, Any] -async put(path: str, **kwargs) -> Dict[str, Any] -async delete(path: str, **kwargs) -> Dict[str, Any] -``` - -### ConnectorConfig - -Configuration class for connectors. 
- -#### Parameters - -```python -@dataclass -class ConnectorConfig: - base_url: str - api_key: str - api_version: str = "v1" - timeout: float = 30.0 - max_connections: int = 100 - max_retries: int = 3 - retry_backoff: float = 1.0 - rate_limit: Optional[int] = None - burst_limit: Optional[int] = None - auth_type: str = "bearer" - auth_config: Dict[str, Any] = field(default_factory=dict) - webhook_secret: Optional[str] = None - webhook_endpoint: Optional[str] = None - enable_metrics: bool = True - log_level: str = "INFO" - enterprise_id: Optional[str] = None - tenant_id: Optional[str] = None - region: Optional[str] = None -``` - -## Base Connector API - -### BaseConnector - -Abstract base class for all connectors. - -#### Methods - -##### initialize() -Initialize the connector. - -```python -async initialize() -> None -``` - -##### cleanup() -Cleanup connector resources. - -```python -async cleanup() -> None -``` - -##### execute_operation() -Execute an operation with validation. - -```python -async execute_operation( - operation: str, - data: Dict[str, Any], - **kwargs -) -> OperationResult -``` - -##### batch_execute() -Execute multiple operations concurrently. - -```python -async batch_execute( - operations: List[Dict[str, Any]], - max_concurrent: int = 10 -) -> List[OperationResult] -``` - -##### sync() -Synchronize data with external system. - -```python -async sync( - since: Optional[datetime] = None, - filters: Optional[Dict[str, Any]] = None -) -> Dict[str, Any] -``` - -#### Properties - -##### is_initialized -Check if connector is initialized. - -```python -@property -def is_initialized() -> bool -``` - -##### last_sync -Get last sync timestamp. - -```python -@property -def last_sync() -> Optional[datetime] -``` - -##### metrics -Get connector metrics. - -```python -@property -def metrics() -> Dict[str, Any] -``` - -## Payment Connector API - -### PaymentConnector - -Abstract base class for payment processors. 
- -#### Methods - -##### create_charge() -Create a charge. - -```python -async create_charge( - amount: int, - currency: str, - source: str, - description: Optional[str] = None, - metadata: Optional[Dict[str, Any]] = None -) -> Charge -``` - -**Parameters:** -- `amount` (int): Amount in smallest currency unit (cents) -- `currency` (str): 3-letter currency code -- `source` (str): Payment source ID -- `description` (str, optional): Charge description -- `metadata` (Dict, optional): Additional metadata - -**Returns:** -- `Charge`: Created charge object - -##### create_refund() -Create a refund. - -```python -async create_refund( - charge_id: str, - amount: Optional[int] = None, - reason: Optional[str] = None -) -> Refund -``` - -##### create_payment_method() -Create a payment method. - -```python -async create_payment_method( - type: str, - card: Dict[str, Any], - metadata: Optional[Dict[str, Any]] = None -) -> PaymentMethod -``` - -##### create_subscription() -Create a subscription. - -```python -async create_subscription( - customer: str, - items: List[Dict[str, Any]], - metadata: Optional[Dict[str, Any]] = None -) -> Subscription -``` - -##### cancel_subscription() -Cancel a subscription. 
- -```python -async cancel_subscription( - subscription_id: str, - at_period_end: bool = True -) -> Subscription -``` - -### Data Models - -#### Charge - -```python -@dataclass -class Charge: - id: str - amount: int - currency: str - status: PaymentStatus - created_at: datetime - updated_at: datetime - description: Optional[str] - metadata: Dict[str, Any] - amount_refunded: int = 0 - refunds: List[Dict[str, Any]] = None - payment_method_id: Optional[str] = None - payment_method_details: Optional[Dict[str, Any]] = None -``` - -#### Refund - -```python -@dataclass -class Refund: - id: str - amount: int - currency: str - status: RefundStatus - created_at: datetime - updated_at: datetime - charge_id: str - reason: Optional[str] - metadata: Dict[str, Any] -``` - -#### PaymentMethod - -```python -@dataclass -class PaymentMethod: - id: str - type: str - created_at: datetime - metadata: Dict[str, Any] - brand: Optional[str] = None - last4: Optional[str] = None - exp_month: Optional[int] = None - exp_year: Optional[int] = None -``` - -#### Subscription - -```python -@dataclass -class Subscription: - id: str - status: SubscriptionStatus - created_at: datetime - updated_at: datetime - current_period_start: datetime - current_period_end: datetime - customer_id: str - metadata: Dict[str, Any] - amount: Optional[int] = None - currency: Optional[str] = None - interval: Optional[str] = None - interval_count: Optional[int] = None - trial_start: Optional[datetime] = None - trial_end: Optional[datetime] = None - canceled_at: Optional[datetime] = None - ended_at: Optional[datetime] = None -``` - -## ERP Connector API - -### ERPConnector - -Base class for ERP connectors. - -#### Methods - -##### create_entity() -Create entity in ERP. - -```python -async _create_entity( - entity_type: str, - data: Dict[str, Any] -) -> OperationResult -``` - -##### update_entity() -Update entity in ERP. 
- -```python -async _update_entity( - entity_type: str, - data: Dict[str, Any] -) -> OperationResult -``` - -##### delete_entity() -Delete entity from ERP. - -```python -async _delete_entity( - entity_type: str, - data: Dict[str, Any] -) -> OperationResult -``` - -##### sync_data() -Synchronize data from ERP. - -```python -async _sync_data( - data: Dict[str, Any] -) -> OperationResult -``` - -##### batch_sync() -Batch synchronize data. - -```python -async _batch_sync( - data: Dict[str, Any] -) -> OperationResult -``` - -## Webhook API - -### WebhookHandler - -Handles webhook processing and verification. - -#### Methods - -##### setup() -Setup webhook handler. - -```python -async setup( - endpoint: str, - secret: str = None -) -> None -``` - -##### cleanup() -Cleanup webhook handler. - -```python -async cleanup() -> None -``` - -##### add_handler() -Add handler for specific event type. - -```python -def add_handler( - event_type: str, - handler: Callable[[WebhookEvent], Awaitable[None]] -) -> None -``` - -##### verify() -Verify webhook signature. - -```python -async verify( - payload: bytes, - signature: str, - algorithm: str = "sha256" -) -> bool -``` - -##### handle() -Handle incoming webhook. 
- -```python -async handle( - payload: bytes, - signature: str = None -) -> Dict[str, Any] -``` - -## Error Handling - -### Exception Hierarchy - -``` -AITBCError -├── AuthenticationError -├── RateLimitError -├── APIError -├── ConfigurationError -├── ConnectorError -│ ├── PaymentError -│ ├── ERPError -│ ├── SyncError -│ └── WebhookError -├── ValidationError -└── TimeoutError -``` - -### Error Response Format - -```python -{ - "success": false, - "error": "Error message", - "error_code": "ERROR_CODE", - "details": { - "field": "value", - "additional": "info" - } -} -``` - -## Rate Limiting - -### Rate Limit Headers - -``` -X-RateLimit-Limit: 1000 -X-RateLimit-Remaining: 999 -X-RateLimit-Reset: 1640995200 -Retry-After: 60 -``` - -### Rate Limit Error - -```python -RateLimitError( - message="Rate limit exceeded", - retry_after=60 -) -``` - -## Metrics - -### Metric Types - -- **Counters**: Cumulative counts (requests, errors) -- **Gauges**: Current values (active connections) -- **Histograms**: Distributions (response times) -- **Timers**: Duration measurements - -### Metrics Format - -```python -{ - "timestamp": "2024-01-01T00:00:00Z", - "source": "aitbc-enterprise-sdk", - "metrics": [ - { - "name": "requests_total", - "value": 1000, - "tags": {"method": "POST", "status": "200"} - } - ] -} -``` - -## Authentication - -### Bearer Token - -```python -headers = { - "Authorization": "Bearer your-token" -} -``` - -### OAuth 2.0 - -```python -headers = { - "Authorization": "Bearer access-token" -} -``` - -### HMAC Signature - -```python -headers = { - "X-API-Key": "your-key", - "X-Timestamp": "1640995200", - "X-Signature": "signature" -} -``` - -## SDK Versioning - -The SDK follows semantic versioning: - -- **Major**: Breaking changes -- **Minor**: New features (backward compatible) -- **Patch**: Bug fixes (backward compatible) - -Example: `1.2.3` - -## Response Format - -### Success Response - -```python -{ - "success": true, - "data": {...}, - "metadata": {...} -} -``` - 
-### Error Response - -```python -{ - "success": false, - "error": "Error message", - "error_code": "ERROR_CODE", - "details": {...} -} -``` - -## Pagination - -### Request Parameters - -```python -{ - "limit": 100, - "offset": 0, - "starting_after": "cursor_id" -} -``` - -### Response Format - -```python -{ - "data": [...], - "has_more": true, - "next_page": "cursor_id" -} -``` diff --git a/enterprise-connectors/python-sdk/examples/stripe_example.py b/enterprise-connectors/python-sdk/examples/stripe_example.py deleted file mode 100644 index 49929169..00000000 --- a/enterprise-connectors/python-sdk/examples/stripe_example.py +++ /dev/null @@ -1,282 +0,0 @@ -""" -Example usage of Stripe connector with AITBC Enterprise SDK -""" - -import asyncio -import logging -from datetime import datetime - -from aitbc_enterprise import AITBCClient, ConnectorConfig -from aitbc_enterprise.payments import StripeConnector -from aitbc_enterprise.exceptions import PaymentError - - -async def main(): - """Example Stripe integration""" - - # Configure AITBC client - config = ConnectorConfig( - base_url="https://api.aitbc.io", - api_key="your-api-key", - enterprise_id="enterprise-123", - webhook_secret="whsec_your-webhook-secret" - ) - - # Create AITBC client - async with AITBCClient(config) as client: - - # Initialize Stripe connector - stripe = StripeConnector( - client=client, - config=config, - stripe_api_key="sk_test_your-stripe-key", - webhook_secret="whsec_your-stripe-webhook-secret" - ) - - # Initialize connector - await stripe.initialize() - - try: - # Example 1: Create a payment method - print("Creating payment method...") - payment_method = await stripe.create_payment_method( - type="card", - card={ - "number": "4242424242424242", - "exp_month": 12, - "exp_year": 2024, - "cvc": "123" - }, - metadata={"order_id": "12345"} - ) - print(f"Created payment method: {payment_method.id}") - - # Example 2: Create a customer - print("\nCreating customer...") - customer_result = await 
stripe.execute_operation( - "create_customer", - { - "email": "customer@example.com", - "name": "John Doe", - "payment_method": payment_method.id - } - ) - - if customer_result.success: - customer_id = customer_result.data["id"] - print(f"Created customer: {customer_id}") - - # Example 3: Create a charge - print("\nCreating charge...") - charge = await stripe.create_charge( - amount=2000, # $20.00 - currency="usd", - source=payment_method.id, - description="AITBC GPU computing service", - metadata={"job_id": "job-123", "user_id": "user-456"} - ) - print(f"Created charge: {charge.id} - ${charge.amount / 100:.2f}") - - # Example 4: Create a refund - print("\nCreating refund...") - refund = await stripe.create_refund( - charge_id=charge.id, - amount=500, # $5.00 refund - reason="requested_by_customer" - ) - print(f"Created refund: {refund.id} - ${refund.amount / 100:.2f}") - - # Example 5: Create a subscription - print("\nCreating subscription...") - subscription = await stripe.create_subscription( - customer=customer_id, - items=[ - { - "price": "price_1PHQX2RxeKt9VJxXzZXYZABC", # Replace with actual price ID - "quantity": 1 - } - ], - metadata={"tier": "pro"} - ) - print(f"Created subscription: {subscription.id}") - - # Example 6: Batch operations - print("\nExecuting batch operations...") - batch_results = await stripe.batch_execute([ - { - "operation": "create_charge", - "data": { - "amount": 1000, - "currency": "usd", - "source": payment_method.id, - "description": "Batch charge 1" - } - }, - { - "operation": "create_charge", - "data": { - "amount": 1500, - "currency": "usd", - "source": payment_method.id, - "description": "Batch charge 2" - } - } - ]) - - successful = sum(1 for r in batch_results if r.success) - print(f"Batch completed: {successful}/{len(batch_results)} successful") - - # Example 7: Check balance - print("\nRetrieving balance...") - balance = await stripe.retrieve_balance() - available = balance.get("available", [{}])[0].get("amount", 0) - 
print(f"Available balance: ${available / 100:.2f}") - - # Example 8: Get connector metrics - print("\nConnector metrics:") - metrics = stripe.metrics - for key, value in metrics.items(): - print(f" {key}: {value}") - - except PaymentError as e: - print(f"Payment error: {e}") - except Exception as e: - print(f"Unexpected error: {e}") - - finally: - # Cleanup - await stripe.cleanup() - - -async def webhook_example(): - """Example webhook handling""" - - config = ConnectorConfig( - base_url="https://api.aitbc.io", - api_key="your-api-key" - ) - - async with AITBCClient(config) as client: - - stripe = StripeConnector( - client=client, - config=config, - stripe_api_key="sk_test_your-stripe-key", - webhook_secret="whsec_your-stripe-webhook-secret" - ) - - await stripe.initialize() - - # Example webhook payload (you'd get this from Stripe) - webhook_payload = b''' - { - "id": "evt_1234567890", - "object": "event", - "api_version": "2023-10-16", - "created": 1703220000, - "type": "charge.succeeded", - "data": { - "object": { - "id": "ch_1234567890", - "object": "charge", - "amount": 2000, - "currency": "usd", - "status": "succeeded" - } - } - } - ''' - - # Example signature (you'd get this from Stripe) - signature = "t=1703220000,v1=5257a869e7ecebeda32affa62ca2d3220b9a825a170d2e87a2ca2b10ef5" - - # Verify webhook - if await stripe.verify_webhook(webhook_payload, signature): - print("Webhook signature verified") - - # Handle webhook - result = await stripe.handle_webhook(webhook_payload) - print(f"Webhook processed: {result}") - else: - print("Invalid webhook signature") - - await stripe.cleanup() - - -async def enterprise_features_example(): - """Example with enterprise features""" - - # Enterprise configuration - config = ConnectorConfig( - base_url="https://api.aitbc.io", - api_key="your-enterprise-api-key", - enterprise_id="enterprise-123", - tenant_id="tenant-456", - region="us-east-1", - rate_limit=100, # 100 requests per second - enable_metrics=True, - 
log_level="DEBUG" - ) - - async with AITBCClient(config) as client: - - # Add custom event handler - async def on_charge_created(data): - print(f"Charge created event: {data.get('id')}") - # Send to internal systems - await client.post( - "/internal/notifications", - json={ - "type": "charge_created", - "data": data - } - ) - - stripe = StripeConnector( - client=client, - config=config, - stripe_api_key="sk_test_your-stripe-key" - ) - - # Register event handler - stripe.add_operation_handler("create_charge", on_charge_created) - - await stripe.initialize() - - # Create charge (will trigger event handler) - charge = await stripe.create_charge( - amount=5000, - currency="usd", - source="pm_card_visa", - description="Enterprise GPU service", - metadata={ - "department": "engineering", - "project": "ml-training", - "cost_center": "cc-123" - } - ) - - print(f"Enterprise charge created: {charge.id}") - - # Wait for event processing - await asyncio.sleep(1) - - await stripe.cleanup() - - -if __name__ == "__main__": - # Set up logging - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" - ) - - # Run examples - print("=== Basic Stripe Example ===") - asyncio.run(main()) - - print("\n=== Webhook Example ===") - asyncio.run(webhook_example()) - - print("\n=== Enterprise Features Example ===") - asyncio.run(enterprise_features_example()) diff --git a/examples/receipts-sign-verify/README.md b/examples/receipts-sign-verify/README.md deleted file mode 100644 index 0e8ce9c2..00000000 --- a/examples/receipts-sign-verify/README.md +++ /dev/null @@ -1,39 +0,0 @@ -# Receipts Sign & Verify Examples - -This directory contains sample scripts demonstrating how to interact with the -coordinator receipt endpoints and validate miner/coordinator signatures. 
- -## Prerequisites - -- Python 3.11+ -- Coordinator API running locally (defaults to `http://localhost:8011`) -- Client API key with access to the coordinator (defaults to - `REDACTED_CLIENT_KEY` in development fixtures) - -Install the helper packages: - -```bash -poetry install --directory packages/py/aitbc-crypto -poetry install --directory packages/py/aitbc-sdk -``` - -## Fetch and Verify - -`fetch_and_verify.py` fetches either the latest receipt or the entire receipt -history for a job, then verifies miner signatures and optional coordinator -attestations. - -```bash -export PYTHONPATH=packages/py/aitbc-sdk/src:packages/py/aitbc-crypto/src -python examples/receipts-sign-verify/fetch_and_verify.py --job-id \ - --coordinator http://localhost:8011 --api-key REDACTED_CLIENT_KEY -``` - -Use `--history` to iterate over all stored receipts: - -```bash -python examples/receipts-sign-verify/fetch_and_verify.py --job-id --history -``` - -The script prints whether the miner signature and each coordinator attestation -validated successfully. diff --git a/examples/receipts-sign-verify/fetch_and_verify.py b/examples/receipts-sign-verify/fetch_and_verify.py deleted file mode 100644 index eb90f561..00000000 --- a/examples/receipts-sign-verify/fetch_and_verify.py +++ /dev/null @@ -1,78 +0,0 @@ -"""Example script that fetches a job receipt from the coordinator API and verifies signatures. - -Usage:: - - export PYTHONPATH=packages/py/aitbc-sdk/src:packages/py/aitbc-crypto/src - python examples/receipts-sign-verify/fetch_and_verify.py --job-id \ - --coordinator http://localhost:8011 --api-key REDACTED_CLIENT_KEY - -The script prints the verification results for the miner signature and any -coordinator attestations present on the receipt payload. 
-""" - -from __future__ import annotations - -import argparse -import sys -from typing import Iterable - -from aitbc_sdk import CoordinatorReceiptClient, verify_receipt - - -def _print_attestations(attestations: Iterable[bool]) -> None: - statuses = ["✔" if valid else "✖" for valid in attestations] - if statuses: - print("Coordinator attestations:", " ".join(statuses)) - else: - print("Coordinator attestations: none") - - -def main(argv: list[str] | None = None) -> int: - parser = argparse.ArgumentParser(description="Fetch and verify receipts") - parser.add_argument("--job-id", required=True, help="Job ID to fetch receipts for") - parser.add_argument( - "--coordinator", - default="http://localhost:8011", - help="Coordinator base URL (default: http://localhost:8011)", - ) - parser.add_argument( - "--api-key", - default="REDACTED_CLIENT_KEY", - help="Client API key to authenticate against the coordinator", - ) - parser.add_argument( - "--history", - action="store_true", - help="Fetch full receipt history instead of only the latest receipt", - ) - - args = parser.parse_args(argv) - - client = CoordinatorReceiptClient(args.coordinator, args.api_key) - - if args.history: - receipts = client.fetch_history(args.job_id) - if not receipts: - print("No receipts found for job", args.job_id) - return 0 - for idx, receipt in enumerate(receipts, start=1): - verification = verify_receipt(receipt) - print(f"Receipt #{idx} ({verification.receipt['receipt_id']}):") - print(" Miner signature valid:", verification.miner_signature.valid) - _print_attestations(att.valid for att in verification.coordinator_attestations) - return 0 - - receipt = client.fetch_latest(args.job_id) - if receipt is None: - print("Latest receipt not available for job", args.job_id) - return 1 - - verification = verify_receipt(receipt) - print("Latest receipt ID:", verification.receipt["receipt_id"]) - print("Miner signature valid:", verification.miner_signature.valid) - _print_attestations(att.valid for att in 
verification.coordinator_attestations) - return 0 - - -if __name__ == "__main__": # pragma: no cover - manual invocation - sys.exit(main()) diff --git a/extensions/aitbc-wallet-firefox-simple/README.md b/extensions/aitbc-wallet-firefox-simple/README.md deleted file mode 100644 index f1896532..00000000 --- a/extensions/aitbc-wallet-firefox-simple/README.md +++ /dev/null @@ -1,133 +0,0 @@ -# AITBC Wallet Extension for Firefox - -A Firefox browser extension that provides AITBC wallet functionality for interacting with the AITBC Trade Exchange and other dApps. - -## Differences from Chrome Version - -This version is specifically built for Firefox with the following differences: - -- Uses Manifest V2 (Firefox still requires V2 for full functionality) -- Uses `browser_action` instead of `action` (V2 syntax) -- Uses `chrome.runtime.connect()` for background script communication -- Background script uses persistent connections via ports - -## Installation - -### Development Installation - -1. Clone this repository -2. Open Firefox and navigate to `about:debugging` -3. Click "This Firefox" in the left sidebar -4. Click "Load Temporary Add-on..." -5. Select the `manifest.json` file from the `aitbc-wallet-firefox` folder - -### Production Installation - -The extension will be published to the Firefox Add-on Store (AMO). Installation instructions will be available once published. - -## Usage - -The usage is identical to the Chrome version: - -1. Install the AITBC Wallet extension -2. Navigate to https://aitbc.bubuit.net/Exchange -3. Toggle the switch from "Demo Mode" to "Real Mode" -4. Click "Connect AITBC Wallet" -5. 
Approve the connection request in the popup - -## Features - -- **Wallet Management**: Create new accounts or import existing private keys -- **Secure Storage**: Private keys are stored locally in Firefox's storage -- **dApp Integration**: Connect to AITBC Trade Exchange and other supported dApps -- **Transaction Signing**: Sign transactions and messages securely -- **Balance Tracking**: View your AITBC token balance - -## API Reference - -The extension injects a `window.aitbcWallet` object into supported dApps with the following methods: - -### `aitbcWallet.connect()` -Connect the dApp to the wallet. -```javascript -const response = await aitbcWallet.connect(); -console.log(response.address); // User's AITBC address -``` - -### `aitbcWallet.getAccount()` -Get the current account address. -```javascript -const address = await aitbcWallet.getAccount(); -``` - -### `aitbcWallet.getBalance(address)` -Get the AITBC balance for an address. -```javascript -const balance = await aitbcWallet.getBalance('aitbc1...'); -console.log(balance.amount); // Balance in AITBC -``` - -### `aitbcWallet.sendTransaction(to, amount, data)` -Send AITBC tokens to another address. -```javascript -const tx = await aitbcWallet.sendTransaction('aitbc1...', 100); -console.log(tx.hash); // Transaction hash -``` - -### `aitbcWallet.signMessage(message)` -Sign a message with the private key. -```javascript -const signature = await aitbcWallet.signMessage('Hello AITBC!'); -``` - -## Security Considerations - -- Private keys are stored locally in Firefox's storage -- Always verify you're on the correct domain before connecting -- Never share your private key with anyone -- Keep your browser and extension updated - -## Development - -To modify the extension: - -1. Make changes to the source files -2. Go to `about:debugging` in Firefox -3. Find "AITBC Wallet" and click "Reload" -4. 
Test your changes - -## File Structure - -``` -aitbc-wallet-firefox/ -├── manifest.json # Extension configuration (Manifest V2) -├── background.js # Background script for wallet operations -├── content.js # Content script for dApp communication -├── injected.js # Script injected into dApps -├── popup.html # Extension popup UI -├── popup.js # Popup logic -├── icons/ # Extension icons -└── README.md # This file -``` - -## Firefox-Specific Notes - -- Firefox requires Manifest V2 for extensions that use content scripts in this manner -- The `browser_action` API is used instead of the newer `action` API -- Background scripts use port-based communication for better performance -- Storage APIs use `chrome.storage` which is compatible with Firefox - -## Troubleshooting - -### Extension not loading -- Ensure you're loading the `manifest.json` file, not the folder -- Check the Browser Console for error messages (`Ctrl+Shift+J`) - -### dApp connection not working -- Refresh the dApp page after installing/updating the extension -- Check that the site is in the `matches` pattern in manifest.json -- Look for errors in the Browser Console - -### Permission errors -- Firefox may show additional permission prompts -- Make sure to allow all requested permissions when installing diff --git a/extensions/aitbc-wallet-firefox-simple/aitbc-wallet.xpi b/extensions/aitbc-wallet-firefox-simple/aitbc-wallet.xpi deleted file mode 100644 index 0fbbd246accef83ea7304ec27f432a1bfa2a497c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7748 zcma)>1yEe;mW3O42tgVR?%KG!ySoMm1Z|vP!QDN$6Ep-3E`d+7zq4?5 zVYYU5u-DW;0>IR1tGyr4RD190fdYVn-Ff-mKOVtqT}t0sQQM9g@`=WF8qmq=2*GPY z;V@#F>CJ{x=^Eku=vL1$i7s^+5Vun6j%~~PUSak=4~D3+E;C2Kb~2~;_p=XxC@X*H zr-ufg4;IH_HX?80;|H|o*z}8S;f(mER{aWVfR-JC-|{l2ra>h zSNMBI9B@(cs*`l*Qro(J0+^3(G9{Bl?1~`#`;N6n%+3Kq+ccFv3HYAxuw&Bp70FmV zRCVVX9jUR=KTs0&hYy*nVK#(s6{!x4>0J;+j=1>%UDlKvzH8;6@~EohJw4Y`Q&Q3F 
zB^e8bM8sW2p0pVYzRP-lvkbrg5KuibiiDvI3<@oA?xycBSD{vWQsAM+YR{vDp@c-H z5+Eb|x>D-iI82@kl=vJlRPaI7^Wxx6iJPnW>*%Fjh@c!<2Z5!RdbGHrl4F2!6%|p7 zw$S3Mc++g7-&pOL(csz+arQ}V%t1zImkv|Q3o_2YI#V&odjgIRTK%_2v%9%ePZIg znKTRD_|K$jYji}^0YKVKRP&^X6vQAE|4*1{>jE?cyU8dfCg|9YA6llb^_I3xJ^oKD{JbhiIFni*VaP+zj z!FGkG}Mk^v3tz5y3vQ7!o6Q1bYN$ zy`r8kbue7FNpTpkIcmnX*A7P@kk#~(_6{nQq--r)wOA{92Cvyy`!$)Q7w?uPx!?GL} zL`R(UoI+JxE`{=saKmq&qs0Mwl5Pt0et&F_q_wB6oWc4*uq_S&br9h4v z#iVe)P?IK+?r@Gp+~Q(NaJl%;X>;d@b2q@>ds_&kU; zT4Eg4aXr|RWEz<^Uc7Vwl4)p|P6^YY6;1SoNUc7-5=!VL!^=Pp-VDk>g)V$n?9))y z?7)z(q;#E{i$lBX3EI%nRraGSam1D^zrMQLn&0kX6Ibz_Zz9<~nlf~7h9Ee5(m=J6 zk}%Cv1BsEaVDBP>#lOm_!zhJEpzih0n)$x^TJO6>ucJ*YVK^Ex>}02yIW6jd${hf%Nk z3NF?HI}_inD7b!n1p^tAA)mxq92KM=xO~k}gVo*d+Ufx#dK=dySdUwh?g?GgmaC7& zB8rYn7rv&S+@;HiSQ>ff7Y|kkslkUgc(b?QqMLcvGb&243D7Um&TOyBLj(#Wxx(U= zBbh3@dLZPNDFxsk>%m6SQerC>`R@KaI74cVKCUw=s;xIL+tsu^1XIKo zd(spHA3u&CV?cDS*LYMPq>u)SPu)lFWy5O;gM%ZK;Y|eht`lQ3Yj`5=M2|&4+K_Nn zq&e8$;Uj;~tB&19J{ldQN9LO;x5Q`Ffjt3{Eg}6XzU1^<8Uq$!?kbjz_2mk9v%L!A zCzq+{gZ$?H(SX^b0+MjVSsfj$aJ?nX|cI~k{)BCPuvAPGn8Xp;Cjif+TCYXK*xWsAI)S#ly}f%@Y!=GgH5SZyLn zwscAyf3XfEhcNncO~bWV3RBE%3Hl*iYmIGXUDu%YQE%VoI zMXRrh^>{VV8M$k~)#LEgwVT7(**e(;f%@;kMaxjjNbicYfTv{|i(|%_o!f8_Jap4y zqj-KWVUQuI(vc-;@`P`tiOq#dRkB@gA1Mb*BV~?L;7`*OGtS5YKv;r}GIH%Cff9B* z$TPB(E;;eSu)xbZtv~((E?36bF1A>+>J#af?9vpN@V$DV{$q&a@It8aB5tGT+D9Ma zGCSWr&%`7gIC{|14}%`j2BGp(%e{LT_+T#c#1LTLb44_B^5^}7*>w<2)_8V4?z*yy zZ6w7v^YOM5lCC59*=y|WakaNX5GFl&C>@Z9Hhc(6@8@iRcjB)j_3~qsl3mqt0Gd zI4-7t&Can{%E&v}jxn?M4253@N40$8YBXW;i38y>Ul+1jk%4k=1br7< z>x|MYNW7rg#LxHD&GF;4O}u0v#b@jSEl~2BMHpG<T2aVsX*a;w9eO{2XLk^lNxoxw*+D1Adk;C6^$6jXMY3q8HC>`- z-hIU;-VL)PEP5XbaU^#aL;^Y-vqMwPbRxnKLS(7ok&p4DM&5y~1mhn7>$XVgV)CKq zJ8^yU<2Cj5i`nWHma3FA8lS?fBOE1r1kJ-3t)Pb)bRMFY|(Zf4P^1EiudCSIS*XiBL=jiA26%VA?u>E|b+aX|&QS?Cm zs7u?vXU>4+uUx*4Js-=iGr4u$dwrbeYzA2BMco?1q_~jSEsOJg&&_nVJ+;#F=xtX+ z&n0Q|~1@%3$we^?7;;2s+Ln#x>zo#k)*rk;6lBz{0f7XAn1K1VJY~^BS`>*PM zrIzX+!@2MAmlY1|Fn`+kgJyZoevy>RyjcjYVRc?Chor16tr{ua&{6gCuL9bc*HkTZ 
zd|lO&Z*G^C`j2Cb9xv`&?n7gbjSISyydP~{Z}NByg~d-Qa<&kP4s zM8_A6N{oV1n=+}ddc#@+YFb&$w;g zLSdEO{G5o?)8#PqBpjF+Sl>tYO6Z4oR#N;Su{AL_YPd=04KikC89QZYjmoM3dj;(B+Q$T%!d6?Xi1fMG78~{8LI1v0&r# ztk@dRwscSvnGR)EMkw$|wne$*K^YPi6?_0I{6? z2?@&J#TY&llz3SH!P|@pToKsHuJhhZgh1O1;Za>9fEJ@B`az)SVkHv^{kcgZ7Yq;m1bw@8cI_JJ+lk z;+F3pRT&w!l>1n)>>m**&*5Um)VHH8;sA+`FWSe9#Jlc}hQB*pm)oQrSJJOt3fnou zJ$wrkWX5mW<$OCja~Jc%)Wvpt{c2wZG>!aMDt8=$FyowT z2u^qv6dNTUn6s2p9yOZ#$lz9^1b)H85>)0HIst1)8G zKTIg)UPst5&6Bu1!(uB+-jR}&LU7ku@W{BmyN#@hlI(UJ?-969U4objhWSrDwMA14 z2gM%H*iEEEo5+6fuH7rDI{GhpjqQ8=#l$Z^0i>+dQi*449+&~2F~*Qy+zY{BWg1J-OP`dXFO9$J zI6k45b89tWxeR2A-zM_i_T4$fYg=_yr}z|Ere)O∓5KC@4+BS|mYsrin?-Fam$a z?jy5ZqQGtgv) zz^VoY-@ix7b(=~Q4%E)DjPj~`qM65$IzJ4ibU z8lILYBEfPXfROL%vEFXvN=?@~)mEP!W`I8Pesl1ZUeHHC-%rg_y?t$*7&92LTvQD^ z4uecGrVSsicDg1jRY@0StG}y;ZB(<(&r~aNhfOz zr$YYiAI_q66ri^oeF&1mdrso(od`70=OoPpbqDpIt1cISHIivKZ@e}@(x*g-BanH= z4y4HU{cSOV=G7(cU*j6O-!AJJ`AxiA&E}+e%8P!|nx*u7b2-c>TB6#A*Mf~zAV5he z8~SPOd}t_xDqEY?GC0 z;!~}SLOD75Udr4k0Culv&DbPmVgI)Ja^xV0lC2b6=s)Ld=&RpCfk(uU+F3EYDkp=} zxucw^(c-Ha+*PQkjle6R+{nEijl!)g#Ez2KCgcrGo{N#K69S`K$$&u`f~HW9+|XoZ zmw~BfK{bZvV6}QV87-}4Gn@ze?lmEUR4@3nE6%1iAEEI|XC|ng55pd3nMrKSOWsLM z)Ze8Y$q3W!v4y{H_ae0_9XGK`=oih$+P1mxc9Qh%Ur5PBsm|()4PK4We?a5^G8e4> zoV{u(tsk7qRtr6$KZ(dE8mU&Qr_^Wm=a(NMqU>EvS*;~o26+}wJW zD7+qpE$Y)ztDAdJioPFwrR1|1uWS9qSfZ)c`kI>QNsrmL=}hYHrdOY|85I*?nwzU8 z9d?O*yexKQDC`h=yr}F&g9fT=5F}W7&Vv)@WQbQonnTv0vC})}!AxL7E#9D&bd+yt zw&W~HIw^;CkaPnsc}ZC<4I6S-*?Z@!^>yRyPC=EHJ>s9EK*sH0^v9^Q5}n~Ho7Y%= zv9B-04~nvfI77&IhCL42kH%?>s04`fa+Cv~4}2CMXjbf@8`vIU#tJeqBXtdj{D(-O zRE@h>ELpk+?-CHk=jxg?TXS8v}(^cY=$0cVccBR?r`aXs{wDWL$|1@M2_jE$m z+~TyWT;bd-%+Q2s0zF(rb}z1kSA?FLeiVjllgxLEc-~Ms`O_Zr)ZwT=1u0p-MdfAq~lZ?h*4MKpz6jbZ?h~8?NnRlLF(3qsb{hxFm zWfJ=DZr|`qpB%++C3J6xx>MZf_Kp9*v?8zjQu=o8nx@$-p~8u3hCCXesha;_5T9gv z09>77EV@N(AFermCkp6VxO=+eLnT8EXiP0Bdp%GTVWQ6}u1rUT_qO~ z?Bn^Z{m*o{J0#Uo^(6Z>jW>yryS7{b>)Nd7Ecc~K$rTS#jxL~pv@cVVVGV{-rD|o~ z`XTjhZTyZ8KC$LIBR8(oBSmj#TpVF_)U=z6X4jgT54{cJH0(i<_0si+)MtPKJE8U6 
zO_MiGKZ_DOrY99gL&Xa&=$4W&Ts|c>oXjUbFW*D8YAUe5sHTl%rEDBoC2Q=!ok+U7`)roK|64Ls(}(OR@q!&)-cs zW3`eGsFHDO@Rn9ZuYv;V%P_K01U%je?(B?|_NMghFl=)a^i<$XRc)ew2B&{o-KiRp zp4k(e;eR%LgCX@D7%in56>2|vb582@{P39S4o^G6?TTXTV=L6DopW{;4thKjWp!4) zrK)VGmz$+9ojm?U9!!al2mYBUNBC4NyhlnSNmD9y+Wj(?w zXYSu-z)%sl0s7hzM|%+keFe{ONPg#oOKISk%xkQ1M`0w ze|Rs3AOP^<0|M&&|M>Jb8xZbK8}M({=C8>A?K=GxIsHQZn+NrR{3rg?-;uuyrGG`b zApm}pP5(XK?;6=(c|%Bm-xafe5BU2g|5u acc.address); -} - -// Get balance for an address -async function getBalance(address) { - // In a real implementation, this would query the blockchain - // For demo, return stored balance - const result = await browser.storage.local.get(['accounts']); - const accounts = result.accounts || []; - const account = accounts.find(acc => acc.address === address); - - return { - address: address, - balance: account ? account.balance || 0 : 0, - symbol: 'AITBC' - }; -} - -// Send transaction -async function sendTransaction(params) { - // In a real implementation, this would create, sign, and broadcast a transaction - const { to, amount, data } = params; - - // Get current account - const result = await browser.storage.local.get(['currentAccount']); - const account = result.currentAccount; - - if (!account) { - throw new Error('No account connected'); - } - - // Confirm transaction - const confirmed = confirm(`Send ${amount} AITBC to ${to}?\n\nFrom: ${account.address}`); - if (!confirmed) { - throw new Error('Transaction rejected'); - } - - // Return mock transaction hash - return { - hash: '0x' + Array.from(crypto.getRandomValues(new Uint8Array(32)), b => b.toString(16).padStart(2, '0')).join(''), - status: 'pending' - }; -} - -// Sign message -async function signMessage(message) { - // Get current account - const result = await browser.storage.local.get(['currentAccount']); - const account = result.currentAccount; - - if (!account) { - throw new Error('No account connected'); - } - - // Confirm signing - const confirmed = confirm(`Sign the following 
message?\n\n"${message}"\n\nAccount: ${account.address}`); - if (!confirmed) { - throw new Error('Message signing rejected'); - } - - // In a real implementation, this would sign with the private key - // For demo, return a mock signature - const encoder = new TextEncoder(); - const data = encoder.encode(message + account.privateKey); - const hash = await crypto.subtle.digest('SHA-256', data); - - return Array.from(new Uint8Array(hash), b => b.toString(16).padStart(2, '0')).join(''); -} diff --git a/extensions/aitbc-wallet-firefox-simple/content.js b/extensions/aitbc-wallet-firefox-simple/content.js deleted file mode 100644 index f7f0cb10..00000000 --- a/extensions/aitbc-wallet-firefox-simple/content.js +++ /dev/null @@ -1,42 +0,0 @@ -// Content script for AITBC Wallet Firefox extension -(function() { - // Inject the wallet API into the page - const script = document.createElement('script'); - script.src = browser.runtime.getURL('injected.js'); - script.onload = function() { - this.remove(); - }; - (document.head || document.documentElement).appendChild(script); - - // Create a port to background script - const port = browser.runtime.connect({ name: "aitbc-wallet" }); - - // Listen for messages from the injected script - window.addEventListener('message', function(event) { - // Only accept messages from our own window - if (event.source !== window) return; - - if (event.data.type && event.data.type === 'AITBC_WALLET_REQUEST') { - // Add origin to the request - const requestWithOrigin = { - ...event.data, - origin: window.location.origin - }; - // Forward the request to the background script - port.postMessage(requestWithOrigin); - } - }); - - // Listen for responses from background script - port.onMessage.addListener(function(response) { - // Send the response back to the page - window.postMessage({ - type: 'AITBC_WALLET_RESPONSE', - id: response.id, - response: { - result: response.result, - error: response.error - } - }, '*'); - }); -})(); diff --git 
a/extensions/aitbc-wallet-firefox-simple/icons/icon-128.png b/extensions/aitbc-wallet-firefox-simple/icons/icon-128.png deleted file mode 100644 index 721612b231d0568e754342e7723d35bad1156944..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1766 zcmc(gYdjMQ0L6zy(-^YEYI(#+GI`F3HO7xfdB^Ue+~|r)jc8jmHjfZj>zYS?6Wz>G zD?*nwZzelz zyyGo%;wdI$ z^F!)_=;M)DnL*6g4U`ZDs~nYOR4vM=pr{{?cy+-E^CfbQDwwZKJEhch9}pb|Kv+@q zA@18X_GbUAnl6~);&mZc42?-Pu*N${Aiez#E!SurYUwj-hdFVX_Mzz^p;{tLxFj%J zXNfOV;ph?@XL?k*d$7HU@i{hT_%F78;Q&P>V~*yV$=lLlSn2O%I2@kyIKIUo=#s-$ zr8Kcg&&%INr{?9=j_4+IlQ97a?M}Vy()yw%)-|#~6mjbY;+!GS!N?)djWt$1h9AVf zy_dRrA*BjC-zGu2TsAf!Q+(ce--4|jdx4Z}H)z|}g`@pmS6t*At%^n9ei$BV&URAp zeJZm`)8ez=SmIfOFv>o?>)i%zwVq9nDed#VG$2-S%Ml{i=)aTdEm{V-H#X#Y{Hn2p}Bff?<0^f*xF^n zhkzhX*P6AYfR~!Ttt@EbDI{M~5XQyUt8^~*&~3RPswQr*3?ktZun~%xPiH(){i`ir zM_{KaX79|c)MQS;G201!ICkwPLeDHZpbh$({)vUxH9JGMUlTt(C?bAX*R)$WQG))c z9IcB;pn~JvDDQ_PD3dLGl&P6!u3nj)kZ^osZWMrmHjroPpBt)f~LwE7MY!XL)vSRYX2X*jRv@>3lon-pWmkdNz~?ND_k~ z85ZlPG_jM6$9Rxu?WHlbpcTq{Lu*Yyo5p3gvoz(y;XAuo#_cK%&Qj^Bh`M-jz%lm4 z_>U0{j}JKLt(>WH^f1#ZRF+c1am?j=!QETe6)I=lz*Y|AgV0B;XMqg}&a!zr2uOSe zHs2D%PPVUdY2|wvyVqf_x2z{}v_rH<3;TUvW)Pm*71I$SI&o7MdH z=r4~bbG8M|I4N`t$hCvQQjn7D1gU?KyD7xuw-H3h^d$C9tDJHF26EbPJ$Q8#uONk{8MxCgkq-KDC$=mq5ZccV2$ zP=URhWcz)VQH%wUIFJ(wN}b*YQ6VwM^GD)EyI(OVH~&nUhyP(Yd`0tXVkcNl)_Go{ zbDEur*i$DAEukTm_@O1E`}uD=a2Dz|b$eqhA`0&r%nA@+%9jA3bg_3j$p5^1Ahq4Z zHG$fG0OD+UXKw{=sz{2&rtOB28qK9vcBQK)T2_K-a`8sJRq<$pz?}-fHnpD_3vnID zm}$sq3#OeeL;K{So)S!_duus?;hCLT!+PVSp4;YxWsP9NjY)Ffd%j6^G|+&woLxoU zdY1hxzd5s;w=>r)_a z!2#8i6aLM0jF_Imn-$0-{!8LY(uS;rgUb|WIUbqt@U(E7@Km1v{}*~1^&K-2WMByR W&g-^_Ac@p3S_NCZJ4N*9G18X_A4X($k+jBJT&$ab4+2x)dLDH>`GhXV~ES~Q%% zup!E!hUiA2MG_TANKr#lN`fv61Q}spx1bdtLwn5P$&68ePFlYruPt zT*qb?=>(=h+))$MW)K>N{z);|y^VW}^w!hg@5%fo+OZjK^cF6CBK! 
z?mwymJfD+3p?eM>?&sr;>+PRNUIRk|W+xX4mkKo|n)^!wii1!O#&$YpKq?x^1KP*Y z=;;eE9LcpQe{@w`Z2|NyBG+MdVsZTeY?IvX5R?c?90h0_Lo1iKJF4Q@=WdVFbsi3h ujAL=(y{CVb?CW<#5?`hO1R#L_0DJ*9Bv_AMxpTe%0000KqqUMf0&uIKhCW~FWPeK=e*?M_vM{_{C@9!-tWh|JSE%7s><}g z0skgJq9jO^1c{O$Q4%Cdf<#G>CU@5Puakzes8)LX0ClbG=^`;hP$wF~SxaL(rm|d6p(Cqn z1(<#dPBqblS0lUcVz%Y3$^Y!@P5_KQ|2q=y!+VX&eOO(kVVVOp z2S|NmVvsji`8fgbv5WaI+d9~LAG4j*EYt7Fr1M@v{%jB=L82r`lmv;AAW;$|N`gd5 gkSGZfwdz2>0QH=oO>2nC{r~^~07*qoM6N<$f=EjrZ~y=R diff --git a/extensions/aitbc-wallet-firefox-simple/icons/icon.svg b/extensions/aitbc-wallet-firefox-simple/icons/icon.svg deleted file mode 100644 index 133cd75a..00000000 --- a/extensions/aitbc-wallet-firefox-simple/icons/icon.svg +++ /dev/null @@ -1,13 +0,0 @@ - - - - - - - - - AITBC - - - - diff --git a/extensions/aitbc-wallet-firefox-simple/injected.js b/extensions/aitbc-wallet-firefox-simple/injected.js deleted file mode 100644 index 711ac2ce..00000000 --- a/extensions/aitbc-wallet-firefox-simple/injected.js +++ /dev/null @@ -1,113 +0,0 @@ -// Injected script that provides the AITBC wallet API to the dApp -(function() { - // Create the wallet API object - const aitbcWallet = { - // Check if wallet is available - isAvailable: function() { - return true; - }, - - // Connect to wallet - connect: async function() { - return new Promise((resolve, reject) => { - const requestId = Date.now().toString(); - - // Send request to content script - window.postMessage({ - type: 'AITBC_WALLET_REQUEST', - id: requestId, - method: 'connect' - }, '*'); - - // Listen for response - const messageHandler = function(event) { - if (event.data.type === 'AITBC_WALLET_RESPONSE' && event.data.id === requestId) { - window.removeEventListener('message', messageHandler); - console.log('Wallet response received:', event.data); - if (event.data.response && event.data.response.error) { - reject(new Error(event.data.response.error)); - } else if (event.data.response && event.data.response.result) { - 
resolve(event.data.response.result); - } else if (event.data.response) { - resolve(event.data.response); - } else { - reject(new Error('Invalid response from wallet')); - } - } - }; - - window.addEventListener('message', messageHandler); - - // Timeout after 30 seconds - setTimeout(() => { - window.removeEventListener('message', messageHandler); - reject(new Error('Connection timeout')); - }, 30000); - }); - }, - - // Get account address - getAccount: async function() { - const accounts = await this.request({ method: 'accounts' }); - return accounts[0]; - }, - - // Get balance - getBalance: async function(address) { - return this.request({ method: 'getBalance', params: { address } }); - }, - - // Send transaction - sendTransaction: async function(to, amount, data = null) { - return this.request({ - method: 'sendTransaction', - params: { to, amount, data } - }); - }, - - // Sign message - signMessage: async function(message) { - return this.request({ method: 'signMessage', params: { message } }); - }, - - // Generic request method - request: async function(payload) { - return new Promise((resolve, reject) => { - const requestId = Date.now().toString(); - - window.postMessage({ - type: 'AITBC_WALLET_REQUEST', - id: requestId, - method: payload.method, - params: payload.params || {} - }, '*'); - - const messageHandler = function(event) { - if (event.data.type === 'AITBC_WALLET_RESPONSE' && event.data.id === requestId) { - window.removeEventListener('message', messageHandler); - if (event.data.response && event.data.response.error) { - reject(new Error(event.data.response.error)); - } else if (event.data.response) { - resolve(event.data.response); - } else { - reject(new Error('Invalid response from wallet')); - } - } - }; - - window.addEventListener('message', messageHandler); - - setTimeout(() => { - window.removeEventListener('message', messageHandler); - reject(new Error('Request timeout')); - }, 30000); - }); - } - }; - - // Inject the wallet API into the window 
object - window.aitbcWallet = aitbcWallet; - - // Fire an event to notify the dApp that the wallet is ready - window.dispatchEvent(new Event('aitbcWalletReady')); -})(); diff --git a/extensions/aitbc-wallet-firefox-simple/install.html b/extensions/aitbc-wallet-firefox-simple/install.html deleted file mode 100644 index dda703bb..00000000 --- a/extensions/aitbc-wallet-firefox-simple/install.html +++ /dev/null @@ -1,149 +0,0 @@ - - - - - - Install AITBC Wallet for Firefox - - - -
- -

AITBC Wallet

-

The secure wallet for AITBC tokens

- - - Download XPI File - - -
-

Installation Steps:

-
    -
  1. Download the XPI file using the button above
  2. -
  3. Open Firefox and type about:debugging in the address bar
  4. -
  5. Click "This Firefox" on the left
  6. -
  7. Click "Load Temporary Add-on..."
  8. -
  9. Select the downloaded XPI file
  10. -
-
- -
-
Secure local key storage
-
One-click dApp connection
-
Transaction signing
-
Balance tracking
-
- -

- After installation, click the AITBC icon in your toolbar to create or import a wallet. -

-
- - diff --git a/extensions/aitbc-wallet-firefox-simple/manifest.json b/extensions/aitbc-wallet-firefox-simple/manifest.json deleted file mode 100644 index 96d37ece..00000000 --- a/extensions/aitbc-wallet-firefox-simple/manifest.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "manifest_version": 2, - "name": "AITBC Wallet", - "version": "1.0.0", - "description": "AITBC Browser Wallet for trading and managing AITBC tokens", - - "permissions": [ - "storage", - "activeTab" - ], - - "content_scripts": [ - { - "matches": ["https://aitbc.bubuit.net/*", "http://localhost:3002/*"], - "js": ["content.js"], - "run_at": "document_start" - } - ], - - "browser_action": { - "default_popup": "popup.html", - "default_title": "AITBC Wallet", - "default_icon": { - "16": "icons/icon-16.png", - "32": "icons/icon-32.png", - "48": "icons/icon-48.png", - "128": "icons/icon-128.png" - } - }, - - "web_accessible_resources": [ - "injected.js" - ], - - "icons": { - "16": "icons/icon-16.png", - "32": "icons/icon-32.png", - "48": "icons/icon-48.png", - "128": "icons/icon-128.png" - }, - - "background": { - "scripts": ["background.js"], - "persistent": false - } -} diff --git a/extensions/aitbc-wallet-firefox-simple/popup.html b/extensions/aitbc-wallet-firefox-simple/popup.html deleted file mode 100644 index 346b983a..00000000 --- a/extensions/aitbc-wallet-firefox-simple/popup.html +++ /dev/null @@ -1,112 +0,0 @@ - - - - - - - -
- -

Wallet

-
- -
-
Account Address:
- -
Not connected
-
0 AITBC
-
- -
- - - - - -
- -
-

Recent Transactions

-
-
No transactions yet
-
-
- - - - diff --git a/extensions/aitbc-wallet-firefox-simple/popup.js b/extensions/aitbc-wallet-firefox-simple/popup.js deleted file mode 100644 index 79d1455f..00000000 --- a/extensions/aitbc-wallet-firefox-simple/popup.js +++ /dev/null @@ -1,315 +0,0 @@ -// Popup script for AITBC Wallet extension -let currentAccount = null; -let accounts = []; - -// Load wallet data on popup open -document.addEventListener('DOMContentLoaded', async function() { - await loadWalletData(); - updateUI(); - - // Check for pending connection request - const pending = await browser.storage.local.get(['pendingConnection']); - if (pending.pendingConnection) { - showConnectionDialog(pending.pendingConnection); - } - - // Add event listeners - document.getElementById('createAccountBtn').addEventListener('click', createAccount); - document.getElementById('importAccountBtn').addEventListener('click', importAccount); - document.getElementById('sendTokensBtn').addEventListener('click', sendTokens); - document.getElementById('receiveTokensBtn').addEventListener('click', receiveTokens); - document.getElementById('viewOnExplorerBtn').addEventListener('click', viewOnExplorer); - document.getElementById('accountSelector').addEventListener('change', switchAccount); -}); - -// Load wallet data from storage -async function loadWalletData() { - const result = await browser.storage.local.get(['accounts', 'currentAccount']); - accounts = result.accounts || []; - currentAccount = result.currentAccount || null; -} - -// Save wallet data to storage -async function saveWalletData() { - await browser.storage.local.set({ - accounts: accounts, - currentAccount: currentAccount - }); -} - -// Update UI with current wallet state -function updateUI() { - const addressEl = document.getElementById('accountAddress'); - const balanceEl = document.getElementById('balance'); - const selectorEl = document.getElementById('accountSelector'); - - // Update account selector - selectorEl.innerHTML = ''; - if (accounts.length === 
0) { - const option = document.createElement('option'); - option.value = ''; - option.textContent = 'No accounts'; - selectorEl.appendChild(option); - } else { - accounts.forEach((account, index) => { - const option = document.createElement('option'); - option.value = index; - option.textContent = `Account ${index + 1} - ${account.address.substring(0, 20)}...`; - if (currentAccount && currentAccount.address === account.address) { - option.selected = true; - } - selectorEl.appendChild(option); - }); - } - - // Update current account display - if (currentAccount) { - addressEl.textContent = currentAccount.address; - balanceEl.textContent = `${currentAccount.balance || 0} AITBC`; - } else { - addressEl.textContent = 'Not connected'; - balanceEl.textContent = '0 AITBC'; - } -} - -// Show connection dialog -function showConnectionDialog(pendingConnection) { - const dialog = document.createElement('div'); - dialog.className = 'connection-dialog'; - dialog.innerHTML = ` -
-

Connection Request

-

${pendingConnection.origin} wants to connect to your AITBC Wallet

-

Address: ${pendingConnection.address}

-
- - -
-
- `; - - // Add styles for the dialog - const style = document.createElement('style'); - style.textContent = ` - .connection-dialog { - position: fixed; - top: 0; - left: 0; - right: 0; - bottom: 0; - background: rgba(0, 0, 0, 0.8); - display: flex; - align-items: center; - justify-content: center; - z-index: 1000; - } - .dialog-content { - background: white; - color: black; - padding: 20px; - border-radius: 8px; - max-width: 300px; - text-align: center; - } - .dialog-content h3 { - margin-top: 0; - } - .dialog-content .address { - font-family: monospace; - font-size: 12px; - word-break: break-all; - background: #f0f0f0; - padding: 5px; - border-radius: 4px; - margin: 10px 0; - } - .dialog-buttons { - display: flex; - gap: 10px; - justify-content: center; - margin-top: 20px; - } - .approve-btn { - background: #28a745; - color: white; - border: none; - padding: 8px 16px; - border-radius: 4px; - cursor: pointer; - } - .reject-btn { - background: #dc3545; - color: white; - border: none; - padding: 8px 16px; - border-radius: 4px; - cursor: pointer; - } - `; - - document.head.appendChild(style); - document.body.appendChild(dialog); - - // Handle button clicks - document.getElementById('approveConnection').addEventListener('click', async () => { - await browser.storage.local.set({ - connectionResponse: { - id: pendingConnection.id, - approved: true - } - }); - await browser.storage.local.remove(['pendingConnection']); - dialog.remove(); - style.remove(); - }); - - document.getElementById('rejectConnection').addEventListener('click', async () => { - await browser.storage.local.set({ - connectionResponse: { - id: pendingConnection.id, - approved: false - } - }); - await browser.storage.local.remove(['pendingConnection']); - dialog.remove(); - style.remove(); - }); -} - -// Switch to a different account -async function switchAccount() { - const selectorEl = document.getElementById('accountSelector'); - const selectedIndex = parseInt(selectorEl.value); - - if 
(isNaN(selectedIndex) || selectedIndex < 0 || selectedIndex >= accounts.length) { - return; - } - - currentAccount = accounts[selectedIndex]; - await saveWalletData(); - updateUI(); -} - -// Create a new account -async function createAccount() { - // Generate a new private key and address - const privateKey = generatePrivateKey(); - const address = await generateAddress(privateKey); - - const newAccount = { - address: address, - privateKey: privateKey, - balance: 0, - created: new Date().toISOString() - }; - - accounts.push(newAccount); - currentAccount = newAccount; - await saveWalletData(); - updateUI(); - - alert('New account created! Please save your private key securely.'); -} - -// Import account from private key -async function importAccount() { - const privateKey = prompt('Enter your private key:'); - if (!privateKey) return; - - try { - const address = await generateAddress(privateKey); - - // Check if account already exists - const existing = accounts.find(a => a.address === address); - if (existing) { - currentAccount = existing; - } else { - currentAccount = { - address: address, - privateKey: privateKey, - balance: 0, - created: new Date().toISOString() - }; - accounts.push(currentAccount); - } - - await saveWalletData(); - updateUI(); - alert('Account imported successfully!'); - } catch (error) { - alert('Invalid private key!'); - } -} - -// Send tokens -async function sendTokens() { - if (!currentAccount) { - alert('Please create or import an account first!'); - return; - } - - const to = prompt('Send to address:'); - const amount = prompt('Amount:'); - - if (!to || !amount) return; - - // In a real implementation, this would create and sign a transaction - alert(`Would send ${amount} AITBC to ${to}`); -} - -// Receive tokens -function receiveTokens() { - if (!currentAccount) { - alert('Please create or import an account first!'); - return; - } - - alert(`Your receiving address:\n${currentAccount.address}`); -} - -// View on explorer -function 
viewOnExplorer() { - if (!currentAccount) { - alert('Please create or import an account first!'); - return; - } - - browser.tabs.create({ url: `https://aitbc.bubuit.net/explorer/?address=${currentAccount.address}` }); -} - -// Generate a random private key (demo only) -function generatePrivateKey() { - const array = new Uint8Array(32); - crypto.getRandomValues(array); - return Array.from(array, byte => byte.toString(16).padStart(2, '0')).join(''); -} - -// Generate address from private key (demo only) -async function generateAddress(privateKey) { - // In a real implementation, this would derive the address from the private key - // using the appropriate cryptographic algorithm - const hash = await crypto.subtle.digest('SHA-256', new TextEncoder().encode(privateKey)); - return 'aitbc1' + Array.from(new Uint8Array(hash), b => b.toString(16).padStart(2, '0')).join('').substring(0, 40); -} - -// Listen for connection requests from dApps -browser.runtime.onMessage.addListener((request, sender, sendResponse) => { - if (request.method === 'connect') { - // Show connection dialog - const connected = confirm(`Allow this site to connect to your AITBC Wallet?`); - - if (connected && currentAccount) { - sendResponse({ - success: true, - address: currentAccount.address - }); - } else { - sendResponse({ - success: false, - error: 'User rejected connection' - }); - } - } - - return true; // Keep the message channel open for async response -}); diff --git a/extensions/aitbc-wallet-simple/README.md b/extensions/aitbc-wallet-simple/README.md deleted file mode 100644 index d632d7ef..00000000 --- a/extensions/aitbc-wallet-simple/README.md +++ /dev/null @@ -1,112 +0,0 @@ -# AITBC Browser Wallet Extension - -A browser extension that provides AITBC wallet functionality for interacting with the AITBC Trade Exchange and other dApps. 
- -## Features - -- **Wallet Management**: Create new accounts or import existing private keys -- **Secure Storage**: Private keys are stored locally in the browser -- **dApp Integration**: Connect to AITBC Trade Exchange and other supported dApps -- **Transaction Signing**: Sign transactions and messages securely -- **Balance Tracking**: View your AITBC token balance - -## Installation - -### Development Installation - -1. Clone this repository -2. Open Chrome and navigate to `chrome://extensions/` -3. Enable "Developer mode" in the top right -4. Click "Load unpacked" -5. Select the `aitbc-wallet` folder - -### Production Installation - -The extension will be published to the Chrome Web Store. Installation instructions will be available once published. - -## Usage - -### Connecting to the Exchange - -1. Install the AITBC Wallet extension -2. Navigate to https://aitbc.bubuit.net/Exchange -3. Toggle the switch from "Demo Mode" to "Real Mode" -4. Click "Connect AITBC Wallet" -5. Approve the connection request in the popup - -### Managing Accounts - -1. Click the AITBC Wallet icon in your browser toolbar -2. Use "Create New Account" to generate a new wallet -3. Use "Import Private Key" to restore an existing wallet -4. **Important**: Save your private key securely! It cannot be recovered if lost. - -## API Reference - -The extension injects a `window.aitbcWallet` object into supported dApps with the following methods: - -### `aitbcWallet.connect()` -Connect the dApp to the wallet. -```javascript -const response = await aitbcWallet.connect(); -console.log(response.address); // User's AITBC address -``` - -### `aitbcWallet.getAccount()` -Get the current account address. -```javascript -const address = await aitbcWallet.getAccount(); -``` - -### `aitbcWallet.getBalance(address)` -Get the AITBC balance for an address. 
-```javascript -const balance = await aitbcWallet.getBalance('aitbc1...'); -console.log(balance.amount); // Balance in AITBC -``` - -### `aitbcWallet.sendTransaction(to, amount, data)` -Send AITBC tokens to another address. -```javascript -const tx = await aitbcWallet.sendTransaction('aitbc1...', 100); -console.log(tx.hash); // Transaction hash -``` - -### `aitbcWallet.signMessage(message)` -Sign a message with the private key. -```javascript -const signature = await aitbcWallet.signMessage('Hello AITBC!'); -``` - -## Security Considerations - -- Private keys are stored locally in Chrome's storage -- Always verify you're on the correct domain before connecting -- Never share your private key with anyone -- Keep your browser and extension updated - -## Development - -To modify the extension: - -1. Make changes to the source files -2. Go to `chrome://extensions/` -3. Click the refresh button on the AITBC Wallet card -4. Test your changes - -## File Structure - -``` -aitbc-wallet/ -├── manifest.json # Extension configuration -├── content.js # Content script for dApp communication -├── injected.js # Script injected into dApps -├── popup.html # Extension popup UI -├── popup.js # Popup logic -├── icons/ # Extension icons -└── README.md # This file -``` - -## Support - -For issues or feature requests, please create an issue in the repository. 
diff --git a/extensions/aitbc-wallet-simple/content.js b/extensions/aitbc-wallet-simple/content.js deleted file mode 100644 index da5f8e6e..00000000 --- a/extensions/aitbc-wallet-simple/content.js +++ /dev/null @@ -1,28 +0,0 @@ -// Content script for AITBC Wallet extension -(function() { - // Inject the wallet API into the page - const script = document.createElement('script'); - script.src = chrome.runtime.getURL('injected.js'); - script.onload = function() { - this.remove(); - }; - (document.head || document.documentElement).appendChild(script); - - // Listen for messages from the injected script - window.addEventListener('message', function(event) { - // Only accept messages from our own window - if (event.source !== window) return; - - if (event.data.type && event.data.type === 'AITBC_WALLET_REQUEST') { - // Forward the request to the background script - chrome.runtime.sendMessage(event.data, function(response) { - // Send the response back to the page - window.postMessage({ - type: 'AITBC_WALLET_RESPONSE', - id: event.data.id, - response: response - }, '*'); - }); - } - }); -})(); diff --git a/extensions/aitbc-wallet-simple/injected.js b/extensions/aitbc-wallet-simple/injected.js deleted file mode 100644 index e4cd6a0e..00000000 --- a/extensions/aitbc-wallet-simple/injected.js +++ /dev/null @@ -1,106 +0,0 @@ -// Injected script that provides the AITBC wallet API to the dApp -(function() { - // Create the wallet API object - const aitbcWallet = { - // Check if wallet is available - isAvailable: function() { - return true; - }, - - // Connect to wallet - connect: async function() { - return new Promise((resolve, reject) => { - const requestId = Date.now().toString(); - - // Send request to content script - window.postMessage({ - type: 'AITBC_WALLET_REQUEST', - id: requestId, - method: 'connect' - }, '*'); - - // Listen for response - const messageHandler = function(event) { - if (event.data.type === 'AITBC_WALLET_RESPONSE' && event.data.id === requestId) { - 
window.removeEventListener('message', messageHandler); - if (event.data.response.error) { - reject(new Error(event.data.response.error)); - } else { - resolve(event.data.response); - } - } - }; - - window.addEventListener('message', messageHandler); - - // Timeout after 30 seconds - setTimeout(() => { - window.removeEventListener('message', messageHandler); - reject(new Error('Connection timeout')); - }, 30000); - }); - }, - - // Get account address - getAccount: async function() { - const accounts = await this.request({ method: 'accounts' }); - return accounts[0]; - }, - - // Get balance - getBalance: async function(address) { - return this.request({ method: 'getBalance', params: { address } }); - }, - - // Send transaction - sendTransaction: async function(to, amount, data = null) { - return this.request({ - method: 'sendTransaction', - params: { to, amount, data } - }); - }, - - // Sign message - signMessage: async function(message) { - return this.request({ method: 'signMessage', params: { message } }); - }, - - // Generic request method - request: async function(payload) { - return new Promise((resolve, reject) => { - const requestId = Date.now().toString(); - - window.postMessage({ - type: 'AITBC_WALLET_REQUEST', - id: requestId, - method: payload.method, - params: payload.params || {} - }, '*'); - - const messageHandler = function(event) { - if (event.data.type === 'AITBC_WALLET_RESPONSE' && event.data.id === requestId) { - window.removeEventListener('message', messageHandler); - if (event.data.response.error) { - reject(new Error(event.data.response.error)); - } else { - resolve(event.data.response); - } - } - }; - - window.addEventListener('message', messageHandler); - - setTimeout(() => { - window.removeEventListener('message', messageHandler); - reject(new Error('Request timeout')); - }, 30000); - }); - } - }; - - // Inject the wallet API into the window object - window.aitbcWallet = aitbcWallet; - - // Fire an event to notify the dApp that the wallet is 
ready - window.dispatchEvent(new Event('aitbcWalletReady')); -})(); diff --git a/extensions/aitbc-wallet-simple/install.html b/extensions/aitbc-wallet-simple/install.html deleted file mode 100644 index b1b80faa..00000000 --- a/extensions/aitbc-wallet-simple/install.html +++ /dev/null @@ -1,156 +0,0 @@ - - - - - - Install AITBC Wallet for Chrome - - - -
- -

AITBC Wallet

-

The secure wallet for AITBC tokens

- - - Download Extension - - -
-
-
1
-
-
Download the extension
-
Click the download button above
-
-
- -
-
2
-
-
Open Chrome Extensions
-
Navigate to chrome://extensions/
-
-
- -
-
3
-
-
Enable Developer Mode
-
Toggle the switch in the top right
-
-
- -
-
4
-
-
Load Extension
-
Click "Load unpacked" and select the extracted folder
-
-
-
- -

- Chrome requires developer mode for security. This ensures you know exactly what you're installing. -

-
- - diff --git a/extensions/aitbc-wallet-simple/manifest.json b/extensions/aitbc-wallet-simple/manifest.json deleted file mode 100644 index 7215abba..00000000 --- a/extensions/aitbc-wallet-simple/manifest.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "manifest_version": 3, - "name": "AITBC Wallet", - "version": "1.0.0", - "description": "AITBC Browser Wallet for trading and managing AITBC tokens", - "permissions": [ - "storage", - "activeTab" - ], - "content_scripts": [ - { - "matches": ["https://aitbc.bubuit.net/*", "http://localhost:3002/*"], - "js": ["content.js"], - "run_at": "document_start" - } - ], - "action": { - "default_popup": "popup.html", - "default_title": "AITBC Wallet" - }, - "web_accessible_resources": [ - { - "resources": ["injected.js"], - "matches": ["https://aitbc.bubuit.net/*", "http://localhost:3002/*"] - } - ], - "icons": { - "16": "icons/icon-16.png", - "48": "icons/icon-48.png", - "128": "icons/icon-128.png" - } -} diff --git a/extensions/aitbc-wallet-simple/popup.html b/extensions/aitbc-wallet-simple/popup.html deleted file mode 100644 index 16612b8c..00000000 --- a/extensions/aitbc-wallet-simple/popup.html +++ /dev/null @@ -1,109 +0,0 @@ - - - - - - - -
- -

Wallet

-
- -
-
Account Address:
-
Not connected
-
0 AITBC
-
- -
- - - - - -
- -
-

Recent Transactions

-
-
No transactions yet
-
-
- - - - diff --git a/extensions/aitbc-wallet-simple/popup.js b/extensions/aitbc-wallet-simple/popup.js deleted file mode 100644 index dee47403..00000000 --- a/extensions/aitbc-wallet-simple/popup.js +++ /dev/null @@ -1,162 +0,0 @@ -// Popup script for AITBC Wallet extension -let currentAccount = null; -let accounts = []; - -// Load wallet data on popup open -document.addEventListener('DOMContentLoaded', async function() { - await loadWalletData(); - updateUI(); -}); - -// Load wallet data from storage -async function loadWalletData() { - const result = await chrome.storage.local.get(['accounts', 'currentAccount']); - accounts = result.accounts || []; - currentAccount = result.currentAccount || null; -} - -// Save wallet data to storage -async function saveWalletData() { - await chrome.storage.local.set({ - accounts: accounts, - currentAccount: currentAccount - }); -} - -// Update UI with current wallet state -function updateUI() { - const addressEl = document.getElementById('accountAddress'); - const balanceEl = document.getElementById('balance'); - - if (currentAccount) { - addressEl.textContent = currentAccount.address; - balanceEl.textContent = `${currentAccount.balance || 0} AITBC`; - } else { - addressEl.textContent = 'Not connected'; - balanceEl.textContent = '0 AITBC'; - } -} - -// Create a new account -async function createAccount() { - // Generate a new private key and address - const privateKey = generatePrivateKey(); - const address = await generateAddress(privateKey); - - const newAccount = { - address: address, - privateKey: privateKey, - balance: 0, - created: new Date().toISOString() - }; - - accounts.push(newAccount); - currentAccount = newAccount; - await saveWalletData(); - updateUI(); - - alert('New account created! 
Please save your private key securely.'); -} - -// Import account from private key -async function importAccount() { - const privateKey = prompt('Enter your private key:'); - if (!privateKey) return; - - try { - const address = await generateAddress(privateKey); - - // Check if account already exists - const existing = accounts.find(a => a.address === address); - if (existing) { - currentAccount = existing; - } else { - currentAccount = { - address: address, - privateKey: privateKey, - balance: 0, - created: new Date().toISOString() - }; - accounts.push(currentAccount); - } - - await saveWalletData(); - updateUI(); - alert('Account imported successfully!'); - } catch (error) { - alert('Invalid private key!'); - } -} - -// Send tokens -async function sendTokens() { - if (!currentAccount) { - alert('Please create or import an account first!'); - return; - } - - const to = prompt('Send to address:'); - const amount = prompt('Amount:'); - - if (!to || !amount) return; - - // In a real implementation, this would create and sign a transaction - alert(`Would send ${amount} AITBC to ${to}`); -} - -// Receive tokens -function receiveTokens() { - if (!currentAccount) { - alert('Please create or import an account first!'); - return; - } - - alert(`Your receiving address:\n${currentAccount.address}`); -} - -// View on explorer -function viewOnExplorer() { - if (!currentAccount) { - alert('Please create or import an account first!'); - return; - } - - chrome.tabs.create({ url: `https://aitbc.bubuit.net/explorer/address/${currentAccount.address}` }); -} - -// Generate a random private key (demo only) -function generatePrivateKey() { - const array = new Uint8Array(32); - crypto.getRandomValues(array); - return Array.from(array, byte => byte.toString(16).padStart(2, '0')).join(''); -} - -// Generate address from private key (demo only) -async function generateAddress(privateKey) { - // In a real implementation, this would derive the address from the private key - // using the 
appropriate cryptographic algorithm - const hash = await crypto.subtle.digest('SHA-256', new TextEncoder().encode(privateKey)); - return 'aitbc1' + Array.from(new Uint8Array(hash), b => b.toString(16).padStart(2, '0')).join('').substring(0, 40); -} - -// Listen for connection requests from dApps -chrome.runtime.onMessage.addListener((request, sender, sendResponse) => { - if (request.method === 'connect') { - // Show connection dialog - const connected = confirm(`Allow this site to connect to your AITBC Wallet?`); - - if (connected && currentAccount) { - sendResponse({ - success: true, - address: currentAccount.address - }); - } else { - sendResponse({ - success: false, - error: 'User rejected connection' - }); - } - } - - return true; // Keep the message channel open for async response -}); diff --git a/extensions/aitbc-wallet/README.md b/extensions/aitbc-wallet/README.md deleted file mode 100644 index d632d7ef..00000000 --- a/extensions/aitbc-wallet/README.md +++ /dev/null @@ -1,112 +0,0 @@ -# AITBC Browser Wallet Extension - -A browser extension that provides AITBC wallet functionality for interacting with the AITBC Trade Exchange and other dApps. - -## Features - -- **Wallet Management**: Create new accounts or import existing private keys -- **Secure Storage**: Private keys are stored locally in the browser -- **dApp Integration**: Connect to AITBC Trade Exchange and other supported dApps -- **Transaction Signing**: Sign transactions and messages securely -- **Balance Tracking**: View your AITBC token balance - -## Installation - -### Development Installation - -1. Clone this repository -2. Open Chrome and navigate to `chrome://extensions/` -3. Enable "Developer mode" in the top right -4. Click "Load unpacked" -5. Select the `aitbc-wallet` folder - -### Production Installation - -The extension will be published to the Chrome Web Store. Installation instructions will be available once published. - -## Usage - -### Connecting to the Exchange - -1. 
Install the AITBC Wallet extension -2. Navigate to https://aitbc.bubuit.net/Exchange -3. Toggle the switch from "Demo Mode" to "Real Mode" -4. Click "Connect AITBC Wallet" -5. Approve the connection request in the popup - -### Managing Accounts - -1. Click the AITBC Wallet icon in your browser toolbar -2. Use "Create New Account" to generate a new wallet -3. Use "Import Private Key" to restore an existing wallet -4. **Important**: Save your private key securely! It cannot be recovered if lost. - -## API Reference - -The extension injects a `window.aitbcWallet` object into supported dApps with the following methods: - -### `aitbcWallet.connect()` -Connect the dApp to the wallet. -```javascript -const response = await aitbcWallet.connect(); -console.log(response.address); // User's AITBC address -``` - -### `aitbcWallet.getAccount()` -Get the current account address. -```javascript -const address = await aitbcWallet.getAccount(); -``` - -### `aitbcWallet.getBalance(address)` -Get the AITBC balance for an address. -```javascript -const balance = await aitbcWallet.getBalance('aitbc1...'); -console.log(balance.amount); // Balance in AITBC -``` - -### `aitbcWallet.sendTransaction(to, amount, data)` -Send AITBC tokens to another address. -```javascript -const tx = await aitbcWallet.sendTransaction('aitbc1...', 100); -console.log(tx.hash); // Transaction hash -``` - -### `aitbcWallet.signMessage(message)` -Sign a message with the private key. -```javascript -const signature = await aitbcWallet.signMessage('Hello AITBC!'); -``` - -## Security Considerations - -- Private keys are stored locally in Chrome's storage -- Always verify you're on the correct domain before connecting -- Never share your private key with anyone -- Keep your browser and extension updated - -## Development - -To modify the extension: - -1. Make changes to the source files -2. Go to `chrome://extensions/` -3. Click the refresh button on the AITBC Wallet card -4. 
Test your changes - -## File Structure - -``` -aitbc-wallet/ -├── manifest.json # Extension configuration -├── content.js # Content script for dApp communication -├── injected.js # Script injected into dApps -├── popup.html # Extension popup UI -├── popup.js # Popup logic -├── icons/ # Extension icons -└── README.md # This file -``` - -## Support - -For issues or feature requests, please create an issue in the repository. diff --git a/extensions/aitbc-wallet/content.js b/extensions/aitbc-wallet/content.js deleted file mode 100644 index da5f8e6e..00000000 --- a/extensions/aitbc-wallet/content.js +++ /dev/null @@ -1,28 +0,0 @@ -// Content script for AITBC Wallet extension -(function() { - // Inject the wallet API into the page - const script = document.createElement('script'); - script.src = chrome.runtime.getURL('injected.js'); - script.onload = function() { - this.remove(); - }; - (document.head || document.documentElement).appendChild(script); - - // Listen for messages from the injected script - window.addEventListener('message', function(event) { - // Only accept messages from our own window - if (event.source !== window) return; - - if (event.data.type && event.data.type === 'AITBC_WALLET_REQUEST') { - // Forward the request to the background script - chrome.runtime.sendMessage(event.data, function(response) { - // Send the response back to the page - window.postMessage({ - type: 'AITBC_WALLET_RESPONSE', - id: event.data.id, - response: response - }, '*'); - }); - } - }); -})(); diff --git a/extensions/aitbc-wallet/injected.js b/extensions/aitbc-wallet/injected.js deleted file mode 100644 index e4cd6a0e..00000000 --- a/extensions/aitbc-wallet/injected.js +++ /dev/null @@ -1,106 +0,0 @@ -// Injected script that provides the AITBC wallet API to the dApp -(function() { - // Create the wallet API object - const aitbcWallet = { - // Check if wallet is available - isAvailable: function() { - return true; - }, - - // Connect to wallet - connect: async function() { - 
return new Promise((resolve, reject) => { - const requestId = Date.now().toString(); - - // Send request to content script - window.postMessage({ - type: 'AITBC_WALLET_REQUEST', - id: requestId, - method: 'connect' - }, '*'); - - // Listen for response - const messageHandler = function(event) { - if (event.data.type === 'AITBC_WALLET_RESPONSE' && event.data.id === requestId) { - window.removeEventListener('message', messageHandler); - if (event.data.response.error) { - reject(new Error(event.data.response.error)); - } else { - resolve(event.data.response); - } - } - }; - - window.addEventListener('message', messageHandler); - - // Timeout after 30 seconds - setTimeout(() => { - window.removeEventListener('message', messageHandler); - reject(new Error('Connection timeout')); - }, 30000); - }); - }, - - // Get account address - getAccount: async function() { - const accounts = await this.request({ method: 'accounts' }); - return accounts[0]; - }, - - // Get balance - getBalance: async function(address) { - return this.request({ method: 'getBalance', params: { address } }); - }, - - // Send transaction - sendTransaction: async function(to, amount, data = null) { - return this.request({ - method: 'sendTransaction', - params: { to, amount, data } - }); - }, - - // Sign message - signMessage: async function(message) { - return this.request({ method: 'signMessage', params: { message } }); - }, - - // Generic request method - request: async function(payload) { - return new Promise((resolve, reject) => { - const requestId = Date.now().toString(); - - window.postMessage({ - type: 'AITBC_WALLET_REQUEST', - id: requestId, - method: payload.method, - params: payload.params || {} - }, '*'); - - const messageHandler = function(event) { - if (event.data.type === 'AITBC_WALLET_RESPONSE' && event.data.id === requestId) { - window.removeEventListener('message', messageHandler); - if (event.data.response.error) { - reject(new Error(event.data.response.error)); - } else { - 
resolve(event.data.response); - } - } - }; - - window.addEventListener('message', messageHandler); - - setTimeout(() => { - window.removeEventListener('message', messageHandler); - reject(new Error('Request timeout')); - }, 30000); - }); - } - }; - - // Inject the wallet API into the window object - window.aitbcWallet = aitbcWallet; - - // Fire an event to notify the dApp that the wallet is ready - window.dispatchEvent(new Event('aitbcWalletReady')); -})(); diff --git a/extensions/aitbc-wallet/manifest.json b/extensions/aitbc-wallet/manifest.json deleted file mode 100644 index 7215abba..00000000 --- a/extensions/aitbc-wallet/manifest.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "manifest_version": 3, - "name": "AITBC Wallet", - "version": "1.0.0", - "description": "AITBC Browser Wallet for trading and managing AITBC tokens", - "permissions": [ - "storage", - "activeTab" - ], - "content_scripts": [ - { - "matches": ["https://aitbc.bubuit.net/*", "http://localhost:3002/*"], - "js": ["content.js"], - "run_at": "document_start" - } - ], - "action": { - "default_popup": "popup.html", - "default_title": "AITBC Wallet" - }, - "web_accessible_resources": [ - { - "resources": ["injected.js"], - "matches": ["https://aitbc.bubuit.net/*", "http://localhost:3002/*"] - } - ], - "icons": { - "16": "icons/icon-16.png", - "48": "icons/icon-48.png", - "128": "icons/icon-128.png" - } -} diff --git a/extensions/aitbc-wallet/popup.html b/extensions/aitbc-wallet/popup.html deleted file mode 100644 index 16612b8c..00000000 --- a/extensions/aitbc-wallet/popup.html +++ /dev/null @@ -1,109 +0,0 @@ - - - - - - - -
- -

Wallet

-
- -
-
Account Address:
-
Not connected
-
0 AITBC
-
- -
- - - - - -
- -
-

Recent Transactions

-
-
No transactions yet
-
-
- - - - diff --git a/extensions/aitbc-wallet/popup.js b/extensions/aitbc-wallet/popup.js deleted file mode 100644 index dee47403..00000000 --- a/extensions/aitbc-wallet/popup.js +++ /dev/null @@ -1,162 +0,0 @@ -// Popup script for AITBC Wallet extension -let currentAccount = null; -let accounts = []; - -// Load wallet data on popup open -document.addEventListener('DOMContentLoaded', async function() { - await loadWalletData(); - updateUI(); -}); - -// Load wallet data from storage -async function loadWalletData() { - const result = await chrome.storage.local.get(['accounts', 'currentAccount']); - accounts = result.accounts || []; - currentAccount = result.currentAccount || null; -} - -// Save wallet data to storage -async function saveWalletData() { - await chrome.storage.local.set({ - accounts: accounts, - currentAccount: currentAccount - }); -} - -// Update UI with current wallet state -function updateUI() { - const addressEl = document.getElementById('accountAddress'); - const balanceEl = document.getElementById('balance'); - - if (currentAccount) { - addressEl.textContent = currentAccount.address; - balanceEl.textContent = `${currentAccount.balance || 0} AITBC`; - } else { - addressEl.textContent = 'Not connected'; - balanceEl.textContent = '0 AITBC'; - } -} - -// Create a new account -async function createAccount() { - // Generate a new private key and address - const privateKey = generatePrivateKey(); - const address = await generateAddress(privateKey); - - const newAccount = { - address: address, - privateKey: privateKey, - balance: 0, - created: new Date().toISOString() - }; - - accounts.push(newAccount); - currentAccount = newAccount; - await saveWalletData(); - updateUI(); - - alert('New account created! 
Please save your private key securely.'); -} - -// Import account from private key -async function importAccount() { - const privateKey = prompt('Enter your private key:'); - if (!privateKey) return; - - try { - const address = await generateAddress(privateKey); - - // Check if account already exists - const existing = accounts.find(a => a.address === address); - if (existing) { - currentAccount = existing; - } else { - currentAccount = { - address: address, - privateKey: privateKey, - balance: 0, - created: new Date().toISOString() - }; - accounts.push(currentAccount); - } - - await saveWalletData(); - updateUI(); - alert('Account imported successfully!'); - } catch (error) { - alert('Invalid private key!'); - } -} - -// Send tokens -async function sendTokens() { - if (!currentAccount) { - alert('Please create or import an account first!'); - return; - } - - const to = prompt('Send to address:'); - const amount = prompt('Amount:'); - - if (!to || !amount) return; - - // In a real implementation, this would create and sign a transaction - alert(`Would send ${amount} AITBC to ${to}`); -} - -// Receive tokens -function receiveTokens() { - if (!currentAccount) { - alert('Please create or import an account first!'); - return; - } - - alert(`Your receiving address:\n${currentAccount.address}`); -} - -// View on explorer -function viewOnExplorer() { - if (!currentAccount) { - alert('Please create or import an account first!'); - return; - } - - chrome.tabs.create({ url: `https://aitbc.bubuit.net/explorer/address/${currentAccount.address}` }); -} - -// Generate a random private key (demo only) -function generatePrivateKey() { - const array = new Uint8Array(32); - crypto.getRandomValues(array); - return Array.from(array, byte => byte.toString(16).padStart(2, '0')).join(''); -} - -// Generate address from private key (demo only) -async function generateAddress(privateKey) { - // In a real implementation, this would derive the address from the private key - // using the 
appropriate cryptographic algorithm - const hash = await crypto.subtle.digest('SHA-256', new TextEncoder().encode(privateKey)); - return 'aitbc1' + Array.from(new Uint8Array(hash), b => b.toString(16).padStart(2, '0')).join('').substring(0, 40); -} - -// Listen for connection requests from dApps -chrome.runtime.onMessage.addListener((request, sender, sendResponse) => { - if (request.method === 'connect') { - // Show connection dialog - const connected = confirm(`Allow this site to connect to your AITBC Wallet?`); - - if (connected && currentAccount) { - sendResponse({ - success: true, - address: currentAccount.address - }); - } else { - sendResponse({ - success: false, - error: 'User rejected connection' - }); - } - } - - return true; // Keep the message channel open for async response -}); diff --git a/governance/README.md b/governance/README.md deleted file mode 100644 index f1d5eb7c..00000000 --- a/governance/README.md +++ /dev/null @@ -1,203 +0,0 @@ ---- -title: AITBC Governance -description: Community governance and decision-making for the AITBC protocol -layout: default ---- - -# AITBC Governance - -Welcome to the AITBC governance hub. This site serves as the central location for all governance-related information, including RFCs, decision records, and community participation guidelines. 
- -## Quick Navigation - -- [RFC Process](/rfc-process) - How to propose changes to AITBC -- [Active RFCs](/active-rfcs) - Currently under discussion -- [Accepted RFCs](/accepted-rfcs) - Approved proposals -- [Community Calls](/calls) - Meeting schedule and recordings -- [Governance Framework](/framework) - Roles and responsibilities -- [Decision Log](/decisions) - Historical decisions and rationale - -## Latest Updates - -### Recent RFCs - -| RFC | Title | Status | Date | -|-----|-------|--------|------| -| [RFC-001](/rfcs/001-multi-tenant-architecture) | Multi-tenant Architecture | Accepted | 2024-01-15 | -| [RFC-002](/rfcs/002-ecosystem-certification) | Ecosystem Certification Program | Accepted | 2024-01-15 | - -### Upcoming Events - -- **Community Call**: 2024-01-22 at 14:00 UTC -- **RFC Review**: 2024-01-24 at 16:00 UTC -- **Governance Meeting**: 2024-01-29 at 15:00 UTC - -## How to Participate - -### 1. Stay Informed -- Join our [Discord server](https://discord.gg/aitbc) -- Subscribe to the [governance mailing list](mailto:governance@aitbc.io) -- Watch the [rfcs repository](https://github.com/aitbc/rfcs) - -### 2. Contribute to RFCs -- Review draft RFCs on GitHub -- Comment with technical feedback -- Submit implementation proposals -- Join community discussions - -### 3. Join Community Calls -- Weekly technical syncs (Tuesdays 14:00 UTC) -- Bi-weekly governance meetings (alternating Mondays) -- Monthly RFC reviews (last Thursday) - -### 4. 
Vote on Decisions -- Informal voting via GitHub reactions -- Formal governance votes (when applicable) -- Delegate participation if unable to attend - -## Governance Structure - -### Current Model: Benevolent Dictator -- AITBC Foundation holds final decision authority -- Community input strongly considered -- Transparent decision-making process -- Gradual decentralization planned - -### Transition Plan -- Phase 1 (Current): Foundation-led with community input -- Phase 2 (After 100 RFCs): Delegate voting system -- Phase 3 (Future): Full on-chain governance - -### Roles and Responsibilities - -#### AITBC Foundation -- Maintains core protocol -- Ensures network security -- Facilitates community growth -- Holds veto power (with sunset clause) - -#### Core Team -- Technical review of RFCs -- Implementation of accepted proposals -- Network operations -- Security oversight - -#### Community Contributors -- RFC proposals and reviews -- Code contributions -- Documentation -- Testing and feedback - -#### Delegates -- Represent stakeholder interests -- Vote on governance decisions -- Participate in working groups -- Communicate with constituencies - -## Working Groups - -### Protocol Working Group -- Core protocol improvements -- Consensus mechanisms -- Cross-chain functionality -- Network parameters - -### Ecosystem Working Group -- SDK standards -- Connector specifications -- Certification requirements -- Developer experience - -### Governance Working Group -- Process improvements -- Election procedures -- Community guidelines -- Transparency initiatives - -### Security Working Group -- Security audits -- Vulnerability disclosure -- Best practices -- Incident response - -## Decision Making Process - -### Types of Decisions - -#### Protocol Changes -- Require RFC process -- Technical review mandatory -- Security assessment required -- Implementation timeline specified - -#### Governance Changes -- Require RFC process -- Community consultation required -- 30-day comment 
period -- Supermajority approval needed - -#### Operational Decisions -- Made by Core Team -- Documented in decision log -- Community notification required -- Appeal process available - -### Voting Mechanisms - -#### Informal Voting -- GitHub reactions on RFC PRs -- Discord polls for minor decisions -- Show of hands in community calls -- Non-binding but influential - -#### Formal Voting -- Token-based voting (Phase 2) -- Delegate voting (Phase 2) -- Multi-sig wallet approvals -- On-chain execution - -## Transparency and Accountability - -### Decision Records -- All decisions documented -- Rationale clearly explained -- Alternatives considered -- Implementation status tracked - -### Financial Transparency -- Foundation treasury reports -- Grant program updates -- Expense documentation -- Annual financial statements - -### Performance Metrics -- Network statistics -- Developer activity -- Ecosystem growth -- Governance participation - -## Get Started - -1. **Read the RFC Process** - Understand how to propose changes -2. **Join the Community** - Connect with other contributors -3. **Review Active RFCs** - Participate in current discussions -4. **Attend a Call** - Join the next community meeting -5. **Contribute** - Start with documentation or testing - -## Contact - -- **Governance**: governance@aitbc.io -- **RFC Process**: rfcs@aitbc.io -- **Security**: security@aitbc.io -- **General**: community@aitbc.io - -## Archives - -- [Historical Decisions](/archives/decisions) -- [Past Community Calls](/archives/calls) -- [Rejected RFCs](/archives/rejected-rfcs) -- [Governance Evolution](/archives/evolution) - ---- - -*This governance site is maintained by the AITBC community. 
Last updated: 2024-01-15* diff --git a/governance/calls.md b/governance/calls.md deleted file mode 100644 index e2487cb0..00000000 --- a/governance/calls.md +++ /dev/null @@ -1,283 +0,0 @@ ---- -title: Community Calls -description: Schedule, recordings, and participation guidelines for AITBC community calls -layout: default ---- - -# AITBC Community Calls - -Community calls are regular meetings where the AITBC community discusses technical topics, reviews RFCs, and makes governance decisions. All calls are open to the public. - -## Call Schedule - -### Weekly Technical Sync -- **When**: Every Tuesday at 14:00 UTC -- **Duration**: 60 minutes -- **Focus**: Technical updates, development progress, Q&A -- **Recording**: Yes, published within 24 hours - -### Bi-weekly Governance Meeting -- **When**: Alternating Mondays at 15:00 UTC -- **Duration**: 90 minutes -- **Focus**: RFC reviews, governance decisions, policy discussions -- **Recording**: Yes, published within 48 hours - -### Monthly RFC Review -- **When**: Last Thursday of the month at 16:00 UTC -- **Duration**: 120 minutes -- **Focus**: Deep dive into active RFCs, author presentations -- **Recording**: Yes, published within 72 hours - -### Quarterly Town Hall -- **When**: First Friday of the quarter at 18:00 UTC -- **Duration**: 90 minutes -- **Focus**: Roadmap updates, ecosystem announcements, community awards -- **Recording**: Yes, live-streamed - -## Upcoming Calls - -| Date | Time (UTC) | Type | Agenda | Recording | -|------|------------|------|--------|------------| -| 2024-01-16 | 14:00 | Technical Sync | [Agenda](#) | - | -| 2024-01-22 | 15:00 | Governance | [Agenda](#) | - | -| 2024-01-23 | 14:00 | Technical Sync | [Agenda](#) | - | -| 2024-01-25 | 16:00 | RFC Review | [Agenda](#) | - | - -## How to Join - -### Video Call -- **Zoom**: [Link](https://zoom.us/aitbc) -- **Meeting ID**: 123-456-7890 -- **Password**: aitbc2024 - -### Audio Only -- **Phone**: +1 (555) 123-4567 -- **International**: [List of 
numbers](https://aitbc.io/call-numbers) - -### Chat -- **Discord**: #community-calls channel -- **Slack**: #general channel - -## Participation Guidelines - -### Before the Call -1. **Review the Agenda** - Add topics to GitHub issues -2. **Prepare Questions** - Submit via GitHub or Discord -3. **Test Your Setup** - Check audio/video before joining -4. **Respect Time Zones** - Be mindful of global participants - -### During the Call -1. **Mute When Not Speaking** - Reduce background noise -2. **Use Raise Hand Feature** - Wait to be called on -3. **Be Concise** - Keep comments brief and on-topic -4. **Be Respectful** - Professional discourse required - -### After the Call -1. **Continue Discussion** - Use GitHub issues for follow-up -2. **Share Feedback** - Help us improve the format -3. **Take Action Items** - Complete assigned tasks -4. **Join Next Time** - Regular participation valued - -## Call Recordings - -### 2024-01-09 - Technical Sync -- **Topics**: Multi-tenant architecture, certification program -- **Duration**: 58 minutes -- **Recording**: [YouTube](https://youtu.be/example) -- **Summary**: [Notes](/calls/2024-01-09-summary) -- **Action Items**: [GitHub Project](https://github.com/aitbc/projects/1) - -### 2024-01-02 - Governance Meeting -- **Topics**: RFC process approval, governance framework -- **Duration**: 82 minutes -- **Recording**: [YouTube](https://youtu.be/example) -- **Summary**: [Notes](/calls/2024-01-02-summary) -- **Action Items**: [GitHub Project](https://github.com/aitbc/projects/1) - -### 2023-12-26 - Technical Sync -- **Topics**: Holiday break, Q1 planning -- **Duration**: 45 minutes -- **Recording**: [YouTube](https://youtu.be/example) -- **Summary**: [Notes](/calls/2023-12-26-summary) - -## Call Archives - -All recordings are available on: -- [YouTube Playlist](https://youtube.com/aitbc-calls) -- [Podcast Feed](https://aitbc.io/podcast) -- [Transcripts](/calls/transcripts) - -## Call Templates - -### Technical Sync Agenda Template - 
-```markdown -## Technical Sync - YYYY-MM-DD - -### Welcome & Announcements (5 min) -- Community updates -- New contributors -- Upcoming events - -### Development Updates (20 min) -- Core protocol progress -- SDK updates -- Infrastructure status -- Bug fixes - -### RFC Review (15 min) -- New RFCs -- Active discussions -- Implementation status - -### Community Updates (10 min) -- Ecosystem news -- Partner updates -- Community highlights - -### Q&A (10 min) -- Open floor for questions -- Help wanted items -- Next steps - -### Action Items Review (5 min) -- Previous items status -- New assignments -- Follow-up required -``` - -### Governance Meeting Agenda Template - -```markdown -## Governance Meeting - YYYY-MM-DD - -### Call to Order (5 min) -- Quorum check -- Previous minutes approval -- Action items review - -### RFC Discussions (30 min) -- RFC-XXX: [Title] - - Status update - - Feedback summary - - Decision needed -- RFC-YYY: [Title] - - Implementation progress - - Blockers identified - -### Governance Matters (20 min) -- Process improvements -- Policy updates -- Community feedback -- Election updates - -### Ecosystem Updates (15 min) -- Partner certifications -- Developer metrics -- Grant programs -- Marketing initiatives - -### Open Floor (10 min) -- Community proposals -- Urgent matters -- Future agenda items - -### Adjournment (5 min) -- Summary of decisions -- Action items assigned -- Next meeting date -``` - -## Moderation Guidelines - -### Moderators -- **Primary**: AITBC Foundation representative -- **Secondary**: Core team member -- **Community**: Rotating volunteer - -### Responsibilities -1. Keep discussions on topic -2. Ensure all voices are heard -3. Manage time effectively -4. Document decisions and action items -5. 
Enforce code of conduct - -### Code of Conduct -- Be respectful and inclusive -- No harassment or discrimination -- Professional language required -- Confidential information protected -- Violations result in removal - -## Special Events - -### Hackathons -- **Frequency**: Quarterly -- **Duration**: 48 hours -- **Format**: Virtual + optional meetups -- **Prizes**: Grants and recognition - -### Workshops -- **Frequency**: Monthly -- **Topics**: Technical deep dives -- **Format**: Interactive sessions -- **Materials**: Published afterward - -### Conferences -- **Frequency**: Annual -- **Location**: Rotating global cities -- **Tracks**: Technical, Business, Community -- **CFP**: Open 6 months prior - -## Feedback and Improvement - -### Provide Feedback -- **Survey**: Quarterly community survey -- **Issues**: Use GitHub for suggestions -- **Email**: calls@aitbc.io -- **Discord**: #feedback channel - -### Metrics We Track -- Attendance numbers -- Geographic distribution -- Participation diversity -- Satisfaction scores -- Action item completion - -### Recent Improvements -- Added transcription service -- Improved audio quality -- Better agenda management -- Enhanced documentation -- Mobile-friendly access - -## FAQ - -### Q: Can I present at a community call? -A: Yes! Submit your topic via GitHub issue with "presentation:" tag. - -### Q: Are calls mandatory for contributors? -A: No, but regular participation is valued for governance decisions. - -### Q: How are moderators selected? -A: Initially by Foundation, transitioning to community elections. - -### Q: Can I request a specific topic? -A: Absolutely! Add it to the agenda GitHub issue. - -### Q: What if I can't make the time? -A: All calls are recorded and transcribed for asynchronous participation. - -### Q: How are action items tracked? -A: Via GitHub Projects with assignees and due dates. 
- -## Contact - -- **Call Schedule**: schedule@aitbc.io -- **Technical Issues**: tech@aitbc.io -- **Moderation**: moderation@aitbc.io -- **General**: community@aitbc.io - ---- - -*Last updated: 2024-01-15* diff --git a/python-sdk/aitbc/apis/__init__.py b/python-sdk/aitbc/apis/__init__.py deleted file mode 100644 index b458ef55..00000000 --- a/python-sdk/aitbc/apis/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -""" -API modules for AITBC Python SDK -""" - -from .jobs import JobsAPI, MultiNetworkJobsAPI -from .marketplace import MarketplaceAPI -from .wallet import WalletAPI -from .receipts import ReceiptsAPI -from .settlement import SettlementAPI, MultiNetworkSettlementAPI - -__all__ = [ - "JobsAPI", - "MultiNetworkJobsAPI", - "MarketplaceAPI", - "WalletAPI", - "ReceiptsAPI", - "SettlementAPI", - "MultiNetworkSettlementAPI", -] diff --git a/python-sdk/aitbc/apis/jobs.py b/python-sdk/aitbc/apis/jobs.py deleted file mode 100644 index f8b67c68..00000000 --- a/python-sdk/aitbc/apis/jobs.py +++ /dev/null @@ -1,94 +0,0 @@ -""" -Jobs API for AITBC Python SDK -""" - -from typing import Dict, Any, Optional, List -import logging - -from ..transport import Transport -from ..transport.multinetwork import MultiNetworkClient - -logger = logging.getLogger(__name__) - - -class JobsAPI: - """Jobs API client""" - - def __init__(self, transport: Transport): - self.transport = transport - - async def create(self, data: Dict[str, Any]) -> Dict[str, Any]: - """Create a new job""" - return await self.transport.request('POST', '/v1/jobs', data=data) - - async def get(self, job_id: str) -> Dict[str, Any]: - """Get job details""" - return await self.transport.request('GET', f'/v1/jobs/{job_id}') - - async def list(self, **params) -> List[Dict[str, Any]]: - """List jobs""" - response = await self.transport.request('GET', '/v1/jobs', params=params) - return response.get('jobs', []) - - async def update(self, job_id: str, data: Dict[str, Any]) -> Dict[str, Any]: - """Update job""" - return await 
self.transport.request('PUT', f'/v1/jobs/{job_id}', data=data) - - async def delete(self, job_id: str) -> None: - """Delete job""" - await self.transport.request('DELETE', f'/v1/jobs/{job_id}') - - async def wait_for_completion( - self, - job_id: str, - timeout: Optional[int] = None, - poll_interval: int = 5 - ) -> Dict[str, Any]: - """Wait for job completion""" - # Implementation would poll job status until complete - pass - - -class MultiNetworkJobsAPI(JobsAPI): - """Multi-network Jobs API client""" - - def __init__(self, client: MultiNetworkClient): - self.client = client - - async def create( - self, - data: Dict[str, Any], - chain_id: Optional[int] = None - ) -> Dict[str, Any]: - """Create a new job on specific network""" - transport = self.client.get_transport(chain_id) - return await transport.request('POST', '/v1/jobs', data=data) - - async def get( - self, - job_id: str, - chain_id: Optional[int] = None - ) -> Dict[str, Any]: - """Get job details from specific network""" - transport = self.client.get_transport(chain_id) - return await transport.request('GET', f'/v1/jobs/{job_id}') - - async def list( - self, - chain_id: Optional[int] = None, - **params - ) -> List[Dict[str, Any]]: - """List jobs from specific network""" - transport = self.client.get_transport(chain_id) - response = await transport.request('GET', '/v1/jobs', params=params) - return response.get('jobs', []) - - async def broadcast_create( - self, - data: Dict[str, Any], - chain_ids: Optional[List[int]] = None - ) -> Dict[int, Dict[str, Any]]: - """Create job on multiple networks""" - return await self.client.broadcast_request( - 'POST', '/v1/jobs', data=data, chain_ids=chain_ids - ) diff --git a/python-sdk/aitbc/apis/marketplace.py b/python-sdk/aitbc/apis/marketplace.py deleted file mode 100644 index a9912464..00000000 --- a/python-sdk/aitbc/apis/marketplace.py +++ /dev/null @@ -1,46 +0,0 @@ -""" -Marketplace API for AITBC Python SDK -""" - -from typing import Dict, Any, Optional, List 
-import logging - -from ..transport import Transport - -logger = logging.getLogger(__name__) - - -class MarketplaceAPI: - """Marketplace API client""" - - def __init__(self, transport: Transport): - self.transport = transport - - async def list_offers(self, **params) -> List[Dict[str, Any]]: - """List marketplace offers""" - response = await self.transport.request('GET', '/v1/marketplace/offers', params=params) - return response.get('offers', []) - - async def create_offer(self, data: Dict[str, Any]) -> Dict[str, Any]: - """Create a new offer""" - return await self.transport.request('POST', '/v1/marketplace/offers', data=data) - - async def get_offer(self, offer_id: str) -> Dict[str, Any]: - """Get offer details""" - return await self.transport.request('GET', f'/v1/marketplace/offers/{offer_id}') - - async def update_offer(self, offer_id: str, data: Dict[str, Any]) -> Dict[str, Any]: - """Update offer""" - return await self.transport.request('PUT', f'/v1/marketplace/offers/{offer_id}', data=data) - - async def delete_offer(self, offer_id: str) -> None: - """Delete offer""" - await self.transport.request('DELETE', f'/v1/marketplace/offers/{offer_id}') - - async def accept_offer(self, offer_id: str, data: Dict[str, Any]) -> Dict[str, Any]: - """Accept an offer""" - return await self.transport.request('POST', f'/v1/marketplace/offers/{offer_id}/accept', data=data) - - async def get_stats(self) -> Dict[str, Any]: - """Get marketplace statistics""" - return await self.transport.request('GET', '/v1/marketplace/stats') diff --git a/python-sdk/aitbc/apis/receipts.py b/python-sdk/aitbc/apis/receipts.py deleted file mode 100644 index 77f819f7..00000000 --- a/python-sdk/aitbc/apis/receipts.py +++ /dev/null @@ -1,34 +0,0 @@ -""" -Receipts API for AITBC Python SDK -""" - -from typing import Dict, Any, Optional, List -import logging - -from ..transport import Transport - -logger = logging.getLogger(__name__) - - -class ReceiptsAPI: - """Receipts API client""" - - def 
__init__(self, transport: Transport): - self.transport = transport - - async def get(self, job_id: str) -> Dict[str, Any]: - """Get job receipt""" - return await self.transport.request('GET', f'/v1/receipts/{job_id}') - - async def verify(self, receipt: Dict[str, Any]) -> Dict[str, Any]: - """Verify receipt""" - return await self.transport.request('POST', '/v1/receipts/verify', data=receipt) - - async def list(self, **params) -> List[Dict[str, Any]]: - """List receipts""" - response = await self.transport.request('GET', '/v1/receipts', params=params) - return response.get('receipts', []) - - async def stream(self, **params): - """Stream new receipts""" - return self.transport.stream('GET', '/v1/receipts/stream', params=params) diff --git a/python-sdk/aitbc/apis/settlement.py b/python-sdk/aitbc/apis/settlement.py deleted file mode 100644 index fca45cba..00000000 --- a/python-sdk/aitbc/apis/settlement.py +++ /dev/null @@ -1,100 +0,0 @@ -""" -Settlement API for AITBC Python SDK -""" - -from typing import Dict, Any, Optional, List -import logging - -from ..transport import Transport -from ..transport.multinetwork import MultiNetworkClient - -logger = logging.getLogger(__name__) - - -class SettlementAPI: - """Settlement API client""" - - def __init__(self, transport: Transport): - self.transport = transport - - async def settle_cross_chain( - self, - job_id: str, - target_chain_id: int, - bridge_name: Optional[str] = None - ) -> Dict[str, Any]: - """Initiate cross-chain settlement""" - data = { - 'job_id': job_id, - 'target_chain_id': target_chain_id, - 'bridge_name': bridge_name - } - return await self.transport.request('POST', '/v1/settlement/cross-chain', data=data) - - async def get_settlement_status(self, message_id: str) -> Dict[str, Any]: - """Get settlement status""" - return await self.transport.request('GET', f'/v1/settlement/{message_id}/status') - - async def estimate_cost( - self, - job_id: str, - target_chain_id: int, - bridge_name: Optional[str] = None - 
) -> Dict[str, Any]: - """Estimate settlement cost""" - data = { - 'job_id': job_id, - 'target_chain_id': target_chain_id, - 'bridge_name': bridge_name - } - return await self.transport.request('POST', '/v1/settlement/estimate-cost', data=data) - - async def list_bridges(self) -> Dict[str, Any]: - """List supported bridges""" - return await self.transport.request('GET', '/v1/settlement/bridges') - - async def list_chains(self) -> Dict[str, Any]: - """List supported chains""" - return await self.transport.request('GET', '/v1/settlement/chains') - - async def refund_settlement(self, message_id: str) -> Dict[str, Any]: - """Refund failed settlement""" - return await self.transport.request('POST', f'/v1/settlement/{message_id}/refund') - - -class MultiNetworkSettlementAPI(SettlementAPI): - """Multi-network Settlement API client""" - - def __init__(self, client: MultiNetworkClient): - self.client = client - - async def settle_cross_chain( - self, - job_id: str, - target_chain_id: int, - source_chain_id: Optional[int] = None, - bridge_name: Optional[str] = None - ) -> Dict[str, Any]: - """Initiate cross-chain settlement from specific network""" - transport = self.client.get_transport(source_chain_id) - data = { - 'job_id': job_id, - 'target_chain_id': target_chain_id, - 'bridge_name': bridge_name - } - return await transport.request('POST', '/v1/settlement/cross-chain', data=data) - - async def batch_settle( - self, - job_ids: List[str], - target_chain_id: int, - bridge_name: Optional[str] = None - ) -> List[Dict[str, Any]]: - """Batch settle multiple jobs""" - data = { - 'job_ids': job_ids, - 'target_chain_id': target_chain_id, - 'bridge_name': bridge_name - } - transport = self.client.get_transport() - return await transport.request('POST', '/v1/settlement/batch', data=data) diff --git a/python-sdk/aitbc/apis/wallet.py b/python-sdk/aitbc/apis/wallet.py deleted file mode 100644 index 5eb149c5..00000000 --- a/python-sdk/aitbc/apis/wallet.py +++ /dev/null @@ -1,50 +0,0 @@ 
-""" -Wallet API for AITBC Python SDK -""" - -from typing import Dict, Any, Optional, List -import logging - -from ..transport import Transport - -logger = logging.getLogger(__name__) - - -class WalletAPI: - """Wallet API client""" - - def __init__(self, transport: Transport): - self.transport = transport - - async def create(self) -> Dict[str, Any]: - """Create a new wallet""" - return await self.transport.request('POST', '/v1/wallet') - - async def get_balance(self, token: Optional[str] = None) -> Dict[str, Any]: - """Get wallet balance""" - params = {} - if token: - params['token'] = token - return await self.transport.request('GET', '/v1/wallet/balance', params=params) - - async def send(self, data: Dict[str, Any]) -> Dict[str, Any]: - """Send tokens""" - return await self.transport.request('POST', '/v1/wallet/send', data=data) - - async def get_address(self) -> str: - """Get wallet address""" - response = await self.transport.request('GET', '/v1/wallet/address') - return response.get('address') - - async def get_transactions(self, **params) -> List[Dict[str, Any]]: - """Get transaction history""" - response = await self.transport.request('GET', '/v1/wallet/transactions', params=params) - return response.get('transactions', []) - - async def stake(self, data: Dict[str, Any]) -> Dict[str, Any]: - """Stake tokens""" - return await self.transport.request('POST', '/v1/wallet/stake', data=data) - - async def unstake(self, data: Dict[str, Any]) -> Dict[str, Any]: - """Unstake tokens""" - return await self.transport.request('POST', '/v1/wallet/unstake', data=data) diff --git a/python-sdk/aitbc/client.py b/python-sdk/aitbc/client.py deleted file mode 100644 index df5db4c5..00000000 --- a/python-sdk/aitbc/client.py +++ /dev/null @@ -1,364 +0,0 @@ -""" -Main AITBC client with pluggable transport abstraction -""" - -import asyncio -import logging -from typing import Dict, Any, Optional, Union, List -from datetime import datetime - -from .transport import ( - Transport, - 
HTTPTransport, - WebSocketTransport, - MultiNetworkClient, - NetworkConfig, - TransportError -) -from .transport.base import BatchTransport, CachedTransport, RateLimitedTransport -from .apis.jobs import JobsAPI, MultiNetworkJobsAPI -from .apis.marketplace import MarketplaceAPI -from .apis.wallet import WalletAPI -from .apis.receipts import ReceiptsAPI -from .apis.settlement import SettlementAPI, MultiNetworkSettlementAPI - -logger = logging.getLogger(__name__) - - -class AITBCClient: - """AITBC client with pluggable transports and multi-network support""" - - def __init__( - self, - transport: Optional[Union[Transport, Dict[str, Any]]] = None, - multi_network: bool = False, - config: Optional[Dict[str, Any]] = None - ): - """ - Initialize AITBC client - - Args: - transport: Transport instance or configuration - multi_network: Enable multi-network mode - config: Additional configuration options - """ - self.config = config or {} - self._connected = False - self._apis = {} - - # Initialize transport layer - if multi_network: - self._init_multi_network(transport or {}) - else: - self._init_single_network(transport or self._get_default_config()) - - # Initialize API clients - self._init_apis() - - def _get_default_config(self) -> Dict[str, Any]: - """Get default configuration for backward compatibility""" - return { - 'type': 'http', - 'base_url': self.config.get('base_url', 'https://api.aitbc.io'), - 'timeout': self.config.get('timeout', 30), - 'api_key': self.config.get('api_key'), - 'default_headers': { - 'User-Agent': f'AITBC-Python-SDK/{self._get_version()}', - 'Content-Type': 'application/json' - } - } - - def _init_single_network(self, transport_config: Union[Transport, Dict[str, Any]]) -> None: - """Initialize single network client""" - if isinstance(transport_config, Transport): - self.transport = transport_config - else: - # Create transport from config - self.transport = self._create_transport(transport_config) - - self.multi_network = False - 
self.multi_network_client = None - - def _init_multi_network(self, configs: Dict[str, Any]) -> None: - """Initialize multi-network client""" - self.multi_network_client = MultiNetworkClient(configs) - self.multi_network = True - self.transport = None # Use multi_network_client instead - - def _create_transport(self, config: Dict[str, Any]) -> Transport: - """Create transport from configuration""" - transport_type = config.get('type', 'http') - - # Add API key to headers if provided - if 'api_key' in config and 'default_headers' not in config: - config['default_headers'] = { - 'X-API-Key': config['api_key'], - 'User-Agent': f'AITBC-Python-SDK/{self._get_version()}', - 'Content-Type': 'application/json' - } - - # Create base transport - if transport_type == 'http': - transport = HTTPTransport(config) - elif transport_type == 'websocket': - transport = WebSocketTransport(config) - elif transport_type == 'crosschain': - # Will be implemented later - raise NotImplementedError("CrossChain transport not yet implemented") - else: - raise ValueError(f"Unknown transport type: {transport_type}") - - # Apply mixins if enabled - if config.get('cached', False): - transport = CachedTransport(config) - - if config.get('rate_limited', False): - transport = RateLimitedTransport(config) - - if config.get('batch', False): - transport = BatchTransport(config) - - return transport - - def _init_apis(self) -> None: - """Initialize API clients""" - if self.multi_network: - # Multi-network APIs - self.jobs = MultiNetworkJobsAPI(self.multi_network_client) - self.settlement = MultiNetworkSettlementAPI(self.multi_network_client) - - # Single-network APIs (use default network) - default_transport = self.multi_network_client.get_transport() - self.marketplace = MarketplaceAPI(default_transport) - self.wallet = WalletAPI(default_transport) - self.receipts = ReceiptsAPI(default_transport) - else: - # Single-network APIs - self.jobs = JobsAPI(self.transport) - self.marketplace = 
MarketplaceAPI(self.transport) - self.wallet = WalletAPI(self.transport) - self.receipts = ReceiptsAPI(self.transport) - self.settlement = SettlementAPI(self.transport) - - async def connect(self) -> None: - """Connect to network(s)""" - if self.multi_network: - await self.multi_network_client.connect_all() - else: - await self.transport.connect() - - self._connected = True - logger.info("AITBC client connected") - - async def disconnect(self) -> None: - """Disconnect from network(s)""" - if self.multi_network: - await self.multi_network_client.disconnect_all() - elif self.transport: - await self.transport.disconnect() - - self._connected = False - logger.info("AITBC client disconnected") - - @property - def is_connected(self) -> bool: - """Check if client is connected""" - if self.multi_network: - return self.multi_network_client._connected - elif self.transport: - return self.transport.is_connected - return False - - # Multi-network methods - def add_network(self, network_config: NetworkConfig) -> None: - """Add a network (multi-network mode only)""" - if not self.multi_network: - raise RuntimeError("Multi-network mode not enabled") - - self.multi_network_client.add_network(network_config) - - def remove_network(self, chain_id: int) -> None: - """Remove a network (multi-network mode only)""" - if not self.multi_network: - raise RuntimeError("Multi-network mode not enabled") - - self.multi_network_client.remove_network(chain_id) - - def get_networks(self) -> List[NetworkConfig]: - """Get all configured networks""" - if not self.multi_network: - raise RuntimeError("Multi-network mode not enabled") - - return self.multi_network_client.list_networks() - - def set_default_network(self, chain_id: int) -> None: - """Set default network (multi-network mode only)""" - if not self.multi_network: - raise RuntimeError("Multi-network mode not enabled") - - self.multi_network_client.set_default_network(chain_id) - - async def switch_network(self, chain_id: int) -> None: - 
"""Switch to a different network (multi-network mode only)""" - if not self.multi_network: - raise RuntimeError("Multi-network mode not enabled") - - await self.multi_network_client.switch_network(chain_id) - - async def health_check(self) -> Union[bool, Dict[int, bool]]: - """Check health of connection(s)""" - if self.multi_network: - return await self.multi_network_client.health_check_all() - elif self.transport: - return await self.transport.health_check() - return False - - # Backward compatibility methods - def get_api_key(self) -> Optional[str]: - """Get API key (backward compatibility)""" - if self.multi_network: - # Get from default network - default_network = self.multi_network_client.get_default_network() - if default_network: - return default_network.transport.get_config('api_key') - elif self.transport: - return self.transport.get_config('api_key') - return None - - def set_api_key(self, api_key: str) -> None: - """Set API key (backward compatibility)""" - if self.multi_network: - # Update all networks - for network in self.multi_network_client.networks.values(): - network.transport.update_config({'api_key': api_key}) - elif self.transport: - self.transport.update_config({'api_key': api_key}) - - def get_base_url(self) -> Optional[str]: - """Get base URL (backward compatibility)""" - if self.multi_network: - default_network = self.multi_network_client.get_default_network() - if default_network: - return default_network.transport.get_config('base_url') - elif self.transport: - return self.transport.get_config('base_url') - return None - - # Utility methods - def _get_version(self) -> str: - """Get SDK version""" - try: - from . 
import __version__ - return __version__ - except ImportError: - return "1.0.0" - - def get_stats(self) -> Dict[str, Any]: - """Get client statistics""" - stats = { - 'multi_network': self.multi_network, - 'connected': self._connected, - 'version': self._get_version() - } - - if self.multi_network: - stats['networks'] = self.multi_network_client.get_network_stats() - elif self.transport: - if hasattr(self.transport, 'get_stats'): - stats['transport'] = self.transport.get_stats() - else: - stats['transport'] = { - 'connected': self.transport.is_connected, - 'chain_id': self.transport.chain_id - } - - return stats - - # Context managers - async def __aenter__(self): - """Async context manager entry""" - await self.connect() - return self - - async def __aexit__(self, exc_type, exc_val, exc_tb): - """Async context manager exit""" - await self.disconnect() - - -# Convenience functions for backward compatibility -def create_client( - api_key: Optional[str] = None, - base_url: Optional[str] = None, - timeout: Optional[int] = None, - transport: Optional[Union[Transport, str]] = None, - **kwargs -) -> AITBCClient: - """ - Create AITBC client with backward-compatible interface - - Args: - api_key: API key for authentication - base_url: Base URL for the API - timeout: Request timeout in seconds - transport: Transport type ('http', 'websocket') or Transport instance - **kwargs: Additional configuration options - - Returns: - AITBCClient instance - """ - config = {} - - # Build configuration - if api_key: - config['api_key'] = api_key - if base_url: - config['base_url'] = base_url - if timeout: - config['timeout'] = timeout - - # Add other config - config.update(kwargs) - - # Handle transport parameter - if isinstance(transport, Transport): - return AITBCClient(transport=transport, config=config) - elif transport: - config['type'] = transport - - return AITBCClient(transport=config, config=config) - - -def create_multi_network_client( - networks: Dict[str, Dict[str, Any]], - 
default_network: Optional[str] = None, - **kwargs -) -> AITBCClient: - """ - Create multi-network AITBC client - - Args: - networks: Dictionary of network configurations - default_network: Name of default network - **kwargs: Additional configuration options - - Returns: - AITBCClient instance with multi-network support - """ - config = { - 'networks': networks, - **kwargs - } - - client = AITBCClient(multi_network=True, config=config) - - # Set default network if specified - if default_network: - network = client.multi_network_client.find_network_by_name(default_network) - if network: - client.set_default_network(network.chain_id) - - return client - - -# Legacy aliases for backward compatibility -Client = AITBCClient diff --git a/python-sdk/aitbc/transport/__init__.py b/python-sdk/aitbc/transport/__init__.py deleted file mode 100644 index 38008a15..00000000 --- a/python-sdk/aitbc/transport/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -""" -Transport layer for AITBC Python SDK -""" - -from .base import Transport, TransportError -from .http import HTTPTransport -from .websocket import WebSocketTransport -from .multinetwork import MultiNetworkClient, NetworkConfig - -__all__ = [ - "Transport", - "TransportError", - "HTTPTransport", - "WebSocketTransport", - "MultiNetworkClient", - "NetworkConfig", -] diff --git a/python-sdk/aitbc/transport/base.py b/python-sdk/aitbc/transport/base.py deleted file mode 100644 index 540c98e3..00000000 --- a/python-sdk/aitbc/transport/base.py +++ /dev/null @@ -1,264 +0,0 @@ -""" -Base transport interface for AITBC Python SDK -""" - -from abc import ABC, abstractmethod -from typing import Dict, Any, Optional, AsyncIterator, Union, List -import asyncio -import logging -from datetime import timedelta - -logger = logging.getLogger(__name__) - - -class TransportError(Exception): - """Base exception for transport errors""" - pass - - -class TransportConnectionError(TransportError): - """Raised when transport fails to connect""" - pass - - 
-class TransportRequestError(TransportError): - """Raised when transport request fails""" - def __init__(self, message: str, status_code: Optional[int] = None, response: Optional[Dict[str, Any]] = None): - super().__init__(message) - self.status_code = status_code - self.response = response - - -class Transport(ABC): - """Abstract base class for all transports""" - - def __init__(self, config: Dict[str, Any]): - self.config = config - self._connected = False - self._lock = asyncio.Lock() - self._connection_attempts = 0 - self._max_connection_attempts = config.get('max_connection_attempts', 3) - self._retry_delay = config.get('retry_delay', 1) - - @abstractmethod - async def connect(self) -> None: - """Establish connection""" - pass - - @abstractmethod - async def disconnect(self) -> None: - """Close connection""" - pass - - @abstractmethod - async def request( - self, - method: str, - path: str, - data: Optional[Dict[str, Any]] = None, - params: Optional[Dict[str, Any]] = None, - headers: Optional[Dict[str, str]] = None, - timeout: Optional[float] = None - ) -> Dict[str, Any]: - """Make a request""" - pass - - @abstractmethod - async def stream( - self, - method: str, - path: str, - data: Optional[Dict[str, Any]] = None, - headers: Optional[Dict[str, str]] = None - ) -> AsyncIterator[Dict[str, Any]]: - """Stream responses""" - pass - - async def health_check(self) -> bool: - """Check if transport is healthy""" - try: - if not self._connected: - return False - - # Default health check - make a ping request - await self.request('GET', '/health') - return True - except Exception as e: - logger.warning(f"Transport health check failed: {e}") - return False - - async def ensure_connected(self) -> None: - """Ensure transport is connected, with retry logic""" - async with self._lock: - if self._connected: - return - - while self._connection_attempts < self._max_connection_attempts: - try: - await self.connect() - self._connection_attempts = 0 - return - except Exception as 
e: - self._connection_attempts += 1 - logger.warning(f"Connection attempt {self._connection_attempts} failed: {e}") - - if self._connection_attempts < self._max_connection_attempts: - await asyncio.sleep(self._retry_delay * self._connection_attempts) - else: - raise TransportConnectionError( - f"Failed to connect after {self._max_connection_attempts} attempts" - ) - - @property - def is_connected(self) -> bool: - """Check if transport is connected""" - return self._connected - - @property - def chain_id(self) -> Optional[int]: - """Get the chain ID this transport is connected to""" - return self.config.get('chain_id') - - @property - def network_name(self) -> Optional[str]: - """Get the network name""" - return self.config.get('network_name') - - def get_config(self, key: str, default: Any = None) -> Any: - """Get configuration value""" - return self.config.get(key, default) - - def update_config(self, updates: Dict[str, Any]) -> None: - """Update configuration""" - self.config.update(updates) - - async def __aenter__(self): - """Async context manager entry""" - await self.connect() - return self - - async def __aexit__(self, exc_type, exc_val, exc_tb): - """Async context manager exit""" - await self.disconnect() - - -class BatchTransport(Transport): - """Transport mixin for batch operations""" - - @abstractmethod - async def batch_request( - self, - requests: List[Dict[str, Any]] - ) -> List[Dict[str, Any]]: - """Make multiple requests in batch""" - pass - - -class CachedTransport(Transport): - """Transport mixin for caching responses""" - - def __init__(self, config: Dict[str, Any]): - super().__init__(config) - self._cache: Dict[str, Any] = {} - self._cache_ttl = config.get('cache_ttl', 300) # 5 minutes - self._cache_timestamps: Dict[str, float] = {} - - async def cached_request( - self, - method: str, - path: str, - data: Optional[Dict[str, Any]] = None, - params: Optional[Dict[str, Any]] = None, - headers: Optional[Dict[str, str]] = None, - cache_key: 
Optional[str] = None - ) -> Dict[str, Any]: - """Make request with caching""" - # Only cache GET requests - if method.upper() != 'GET': - return await self.request(method, path, data, params, headers) - - # Generate cache key - if not cache_key: - import hashlib - import json - cache_data = json.dumps({ - 'method': method, - 'path': path, - 'params': params - }, sort_keys=True) - cache_key = hashlib.md5(cache_data.encode()).hexdigest() - - # Check cache - if cache_key in self._cache: - timestamp = self._cache_timestamps.get(cache_key, 0) - if asyncio.get_event_loop().time() - timestamp < self._cache_ttl: - return self._cache[cache_key] - - # Make request - response = await self.request(method, path, data, params, headers) - - # Cache response - self._cache[cache_key] = response - self._cache_timestamps[cache_key] = asyncio.get_event_loop().time() - - return response - - def clear_cache(self, pattern: Optional[str] = None) -> None: - """Clear cached responses""" - if pattern: - import re - regex = re.compile(pattern) - keys_to_remove = [k for k in self._cache.keys() if regex.match(k)] - for key in keys_to_remove: - del self._cache[key] - if key in self._cache_timestamps: - del self._cache_timestamps[key] - else: - self._cache.clear() - self._cache_timestamps.clear() - - -class RateLimitedTransport(Transport): - """Transport mixin for rate limiting""" - - def __init__(self, config: Dict[str, Any]): - super().__init__(config) - self._rate_limit = config.get('rate_limit', 60) # requests per minute - self._rate_window = config.get('rate_window', 60) # seconds - self._requests: List[float] = [] - self._rate_lock = asyncio.Lock() - - async def _check_rate_limit(self) -> None: - """Check if request is within rate limit""" - async with self._rate_lock: - now = asyncio.get_event_loop().time() - - # Remove old requests outside the window - self._requests = [req_time for req_time in self._requests - if now - req_time < self._rate_window] - - # Check if we're at the limit - if 
len(self._requests) >= self._rate_limit: - # Calculate wait time - oldest_request = min(self._requests) - wait_time = self._rate_window - (now - oldest_request) - - if wait_time > 0: - logger.warning(f"Rate limit reached, waiting {wait_time:.2f} seconds") - await asyncio.sleep(wait_time) - - # Add current request - self._requests.append(now) - - async def request( - self, - method: str, - path: str, - data: Optional[Dict[str, Any]] = None, - params: Optional[Dict[str, Any]] = None, - headers: Optional[Dict[str, str]] = None, - timeout: Optional[float] = None - ) -> Dict[str, Any]: - """Make request with rate limiting""" - await self._check_rate_limit() - return await super().request(method, path, data, params, headers, timeout) diff --git a/python-sdk/aitbc/transport/http.py b/python-sdk/aitbc/transport/http.py deleted file mode 100644 index 16fee840..00000000 --- a/python-sdk/aitbc/transport/http.py +++ /dev/null @@ -1,405 +0,0 @@ -""" -HTTP transport implementation for AITBC Python SDK -""" - -import asyncio -import json -import logging -from typing import Dict, Any, Optional, AsyncIterator, Union -from datetime import datetime, timedelta - -import aiohttp -from aiohttp import ClientTimeout, ClientError, ClientResponseError - -from .base import Transport, TransportError, TransportConnectionError, TransportRequestError - -logger = logging.getLogger(__name__) - - -class HTTPTransport(Transport): - """HTTP transport for REST API calls""" - - def __init__(self, config: Dict[str, Any]): - super().__init__(config) - self.base_url = config['base_url'].rstrip('/') - self.session: Optional[aiohttp.ClientSession] = None - self.timeout = ClientTimeout( - total=config.get('timeout', 30), - connect=config.get('connect_timeout', 10), - sock_read=config.get('read_timeout', 30) - ) - self.default_headers = config.get('default_headers', {}) - self.max_redirects = config.get('max_redirects', 10) - self.verify_ssl = config.get('verify_ssl', True) - self._last_request_time: 
Optional[float] = None - - async def connect(self) -> None: - """Create HTTP session""" - try: - # Configure SSL context - ssl_context = None - if not self.verify_ssl: - import ssl - ssl_context = ssl.create_default_context() - ssl_context.check_hostname = False - ssl_context.verify_mode = ssl.CERT_NONE - - # Create connector - connector = aiohttp.TCPConnector( - limit=self.config.get('connection_limit', 100), - limit_per_host=self.config.get('connection_limit_per_host', 30), - ttl_dns_cache=self.config.get('dns_cache_ttl', 300), - use_dns_cache=True, - ssl=ssl_context, - enable_cleanup_closed=True - ) - - # Create session - self.session = aiohttp.ClientSession( - connector=connector, - timeout=self.timeout, - headers=self.default_headers, - max_redirects=self.max_redirects, - raise_for_status=False # We'll handle status codes manually - ) - - # Test connection with health check - await self.health_check() - self._connected = True - logger.info(f"HTTP transport connected to {self.base_url}") - - except Exception as e: - logger.error(f"Failed to connect HTTP transport: {e}") - raise TransportConnectionError(f"Connection failed: {e}") - - async def disconnect(self) -> None: - """Close HTTP session""" - if self.session: - await self.session.close() - self.session = None - self._connected = False - logger.info("HTTP transport disconnected") - - async def request( - self, - method: str, - path: str, - data: Optional[Dict[str, Any]] = None, - params: Optional[Dict[str, Any]] = None, - headers: Optional[Dict[str, str]] = None, - timeout: Optional[float] = None - ) -> Dict[str, Any]: - """Make HTTP request""" - await self.ensure_connected() - - if not self.session: - raise TransportConnectionError("Transport not connected") - - # Prepare URL - url = f"{self.base_url}{path}" - - # Prepare headers - request_headers = {} - if self.default_headers: - request_headers.update(self.default_headers) - if headers: - request_headers.update(headers) - - # Add content-type if data is 
provided - if data and 'content-type' not in request_headers: - request_headers['content-type'] = 'application/json' - - # Prepare request timeout - request_timeout = self.timeout - if timeout: - request_timeout = ClientTimeout(total=timeout) - - # Log request - logger.debug(f"HTTP {method} {url}") - - try: - # Make request - async with self.session.request( - method=method.upper(), - url=url, - json=data if data and request_headers.get('content-type') == 'application/json' else None, - data=data if data and request_headers.get('content-type') != 'application/json' else None, - params=params, - headers=request_headers, - timeout=request_timeout - ) as response: - # Record request time - self._last_request_time = asyncio.get_event_loop().time() - - # Handle response - await self._handle_response(response) - - # Parse response - if response.content_type == 'application/json': - result = await response.json() - else: - result = {'data': await response.text()} - - # Add metadata - result['_metadata'] = { - 'status_code': response.status, - 'headers': dict(response.headers), - 'url': str(response.url) - } - - return result - - except ClientResponseError as e: - raise TransportRequestError( - f"HTTP {e.status}: {e.message}", - status_code=e.status, - response={'error': e.message} - ) - except ClientError as e: - raise TransportError(f"HTTP request failed: {e}") - except asyncio.TimeoutError: - raise TransportError("Request timed out") - except Exception as e: - raise TransportError(f"Unexpected error: {e}") - - async def stream( - self, - method: str, - path: str, - data: Optional[Dict[str, Any]] = None, - headers: Optional[Dict[str, str]] = None - ) -> AsyncIterator[Dict[str, Any]]: - """Stream responses (not supported for basic HTTP)""" - raise NotImplementedError("HTTP transport does not support streaming") - - async def download( - self, - path: str, - file_path: str, - params: Optional[Dict[str, Any]] = None, - headers: Optional[Dict[str, str]] = None, - chunk_size: 
int = 8192 - ) -> None: - """Download file to disk""" - await self.ensure_connected() - - if not self.session: - raise TransportConnectionError("Transport not connected") - - url = f"{self.base_url}{path}" - - try: - async with self.session.get( - url, - params=params, - headers=headers - ) as response: - await self._handle_response(response) - - # Stream to file - with open(file_path, 'wb') as f: - async for chunk in response.content.iter_chunked(chunk_size): - f.write(chunk) - - logger.info(f"Downloaded {url} to {file_path}") - - except Exception as e: - raise TransportError(f"Download failed: {e}") - - async def upload( - self, - path: str, - file_path: str, - params: Optional[Dict[str, Any]] = None, - headers: Optional[Dict[str, str]] = None, - chunk_size: int = 8192 - ) -> Dict[str, Any]: - """Upload file from disk""" - await self.ensure_connected() - - if not self.session: - raise TransportConnectionError("Transport not connected") - - url = f"{self.base_url}{path}" - - try: - # Prepare multipart form data - with open(file_path, 'rb') as f: - data = aiohttp.FormData() - data.add_field( - 'file', - f, - filename=file_path.split('/')[-1], - content_type='application/octet-stream' - ) - - # Add additional fields - if params: - for key, value in params.items(): - data.add_field(key, str(value)) - - async with self.session.post( - url, - data=data, - headers=headers - ) as response: - await self._handle_response(response) - - if response.content_type == 'application/json': - return await response.json() - else: - return {'status': 'uploaded'} - - except Exception as e: - raise TransportError(f"Upload failed: {e}") - - async def _handle_response(self, response: aiohttp.ClientResponse) -> None: - """Handle HTTP response""" - if response.status >= 400: - error_data = {} - - try: - if response.content_type == 'application/json': - error_data = await response.json() - else: - error_data = {'error': await response.text()} - except: - error_data = {'error': f'HTTP 
{response.status}'} - - raise TransportRequestError( - error_data.get('error', f'HTTP {response.status}'), - status_code=response.status, - response=error_data - ) - - def get_stats(self) -> Dict[str, Any]: - """Get transport statistics""" - stats = { - 'connected': self._connected, - 'base_url': self.base_url, - 'last_request_time': self._last_request_time - } - - if self.session: - # Get connector stats - connector = self.session.connector - stats.update({ - 'total_connections': len(connector._conns), - 'available_connections': sum(len(conns) for conns in connector._conns.values()) - }) - - return stats - - -class AuthenticatedHTTPTransport(HTTPTransport): - """HTTP transport with authentication""" - - def __init__(self, config: Dict[str, Any]): - super().__init__(config) - self.auth_type = config.get('auth_type', 'api_key') - self.auth_config = config.get('auth', {}) - - async def _add_auth_headers(self, headers: Dict[str, str]) -> Dict[str, str]: - """Add authentication headers""" - headers = headers.copy() - - if self.auth_type == 'api_key': - api_key = self.auth_config.get('api_key') - if api_key: - key_header = self.auth_config.get('key_header', 'X-API-Key') - headers[key_header] = api_key - - elif self.auth_type == 'bearer': - token = self.auth_config.get('token') - if token: - headers['Authorization'] = f'Bearer {token}' - - elif self.auth_type == 'basic': - username = self.auth_config.get('username') - password = self.auth_config.get('password') - if username and password: - import base64 - credentials = base64.b64encode(f"{username}:{password}".encode()).decode() - headers['Authorization'] = f'Basic {credentials}' - - return headers - - async def request( - self, - method: str, - path: str, - data: Optional[Dict[str, Any]] = None, - params: Optional[Dict[str, Any]] = None, - headers: Optional[Dict[str, str]] = None, - timeout: Optional[float] = None - ) -> Dict[str, Any]: - """Make authenticated HTTP request""" - # Add auth headers - auth_headers = await 
self._add_auth_headers(headers or {}) - - return await super().request( - method, path, data, params, auth_headers, timeout - ) - - -class RetryableHTTPTransport(HTTPTransport): - """HTTP transport with automatic retry""" - - def __init__(self, config: Dict[str, Any]): - super().__init__(config) - self.max_retries = config.get('max_retries', 3) - self.retry_delay = config.get('retry_delay', 1) - self.retry_backoff = config.get('retry_backoff', 2) - self.retry_on = config.get('retry_on', [500, 502, 503, 504]) - - async def request( - self, - method: str, - path: str, - data: Optional[Dict[str, Any]] = None, - params: Optional[Dict[str, Any]] = None, - headers: Optional[Dict[str, str]] = None, - timeout: Optional[float] = None - ) -> Dict[str, Any]: - """Make HTTP request with retry logic""" - last_error = None - - for attempt in range(self.max_retries + 1): - try: - return await super().request( - method, path, data, params, headers, timeout - ) - - except TransportRequestError as e: - last_error = e - - # Check if we should retry - if attempt < self.max_retries and e.status_code in self.retry_on: - delay = self.retry_delay * (self.retry_backoff ** attempt) - logger.warning( - f"Request failed (attempt {attempt + 1}/{self.max_retries + 1}), " - f"retrying in {delay}s: {e}" - ) - await asyncio.sleep(delay) - continue - - # Don't retry on client errors or final attempt - break - - except TransportError as e: - last_error = e - - # Retry on connection errors - if attempt < self.max_retries: - delay = self.retry_delay * (self.retry_backoff ** attempt) - logger.warning( - f"Request failed (attempt {attempt + 1}/{self.max_retries + 1}), " - f"retrying in {delay}s: {e}" - ) - await asyncio.sleep(delay) - continue - - break - - # All retries failed - raise last_error diff --git a/python-sdk/aitbc/transport/multinetwork.py b/python-sdk/aitbc/transport/multinetwork.py deleted file mode 100644 index 5e381cd1..00000000 --- a/python-sdk/aitbc/transport/multinetwork.py +++ 
/dev/null @@ -1,377 +0,0 @@ -""" -Multi-network support for AITBC Python SDK -""" - -import asyncio -import logging -from typing import Dict, Any, Optional, List, Union -from dataclasses import dataclass, field -from datetime import datetime - -from .base import Transport, TransportError, TransportConnectionError -from .http import HTTPTransport -from .websocket import WebSocketTransport - -logger = logging.getLogger(__name__) - - -@dataclass -class NetworkConfig: - """Configuration for a network""" - name: str - chain_id: int - transport: Transport - is_default: bool = False - bridges: List[str] = field(default_factory=list) - explorer_url: Optional[str] = None - rpc_url: Optional[str] = None - native_token: str = "ETH" - gas_token: Optional[str] = None - - -class MultiNetworkClient: - """Client supporting multiple networks and cross-chain operations""" - - def __init__(self, config: Optional[Dict[str, Any]] = None): - self.networks: Dict[int, NetworkConfig] = {} - self.default_network: Optional[int] = None - self._connected = False - self._connection_lock = asyncio.Lock() - - if config: - self._load_config(config) - - def _load_config(self, config: Dict[str, Any]) -> None: - """Load network configurations""" - networks_config = config.get('networks', {}) - - for name, net_config in networks_config.items(): - # Create transport - transport = self._create_transport(net_config) - - # Create network config - network = NetworkConfig( - name=name, - chain_id=net_config['chain_id'], - transport=transport, - is_default=net_config.get('default', False), - bridges=net_config.get('bridges', []), - explorer_url=net_config.get('explorer_url'), - rpc_url=net_config.get('rpc_url'), - native_token=net_config.get('native_token', 'ETH'), - gas_token=net_config.get('gas_token') - ) - - self.add_network(network) - - def _create_transport(self, config: Dict[str, Any]) -> Transport: - """Create transport from config""" - transport_type = config.get('type', 'http') - transport_config = 
config.copy() - - if transport_type == 'http': - return HTTPTransport(transport_config) - elif transport_type == 'websocket': - return WebSocketTransport(transport_config) - else: - raise ValueError(f"Unknown transport type: {transport_type}") - - def add_network(self, network: NetworkConfig) -> None: - """Add a network configuration""" - if network.chain_id in self.networks: - logger.warning(f"Network {network.chain_id} already exists, overwriting") - - self.networks[network.chain_id] = network - - # Set as default if marked or if no default exists - if network.is_default or self.default_network is None: - self.default_network = network.chain_id - - logger.info(f"Added network: {network.name} (chain_id: {network.chain_id})") - - def remove_network(self, chain_id: int) -> None: - """Remove a network configuration""" - if chain_id in self.networks: - network = self.networks[chain_id] - - # Disconnect if connected - if network.transport.is_connected: - asyncio.create_task(network.transport.disconnect()) - - del self.networks[chain_id] - - # Update default if necessary - if self.default_network == chain_id: - self.default_network = None - # Set new default if other networks exist - if self.networks: - self.default_network = next(iter(self.networks)) - - logger.info(f"Removed network: {network.name} (chain_id: {chain_id})") - - def get_transport(self, chain_id: Optional[int] = None) -> Transport: - """Get transport for a network""" - network_id = chain_id or self.default_network - - if network_id is None: - raise ValueError("No default network configured") - - if network_id not in self.networks: - raise ValueError(f"Network {network_id} not configured") - - return self.networks[network_id].transport - - def get_network(self, chain_id: int) -> Optional[NetworkConfig]: - """Get network configuration""" - return self.networks.get(chain_id) - - def list_networks(self) -> List[NetworkConfig]: - """List all configured networks""" - return list(self.networks.values()) - - def 
get_default_network(self) -> Optional[NetworkConfig]: - """Get default network configuration""" - if self.default_network: - return self.networks.get(self.default_network) - return None - - def set_default_network(self, chain_id: int) -> None: - """Set default network""" - if chain_id not in self.networks: - raise ValueError(f"Network {chain_id} not configured") - - self.default_network = chain_id - - # Update all networks' default flag - for net in self.networks.values(): - net.is_default = (net.chain_id == chain_id) - - async def connect_all(self) -> None: - """Connect to all configured networks""" - async with self._connection_lock: - if self._connected: - return - - logger.info(f"Connecting to {len(self.networks)} networks...") - - # Connect all transports - tasks = [] - for chain_id, network in self.networks.items(): - task = asyncio.create_task( - self._connect_network(network), - name=f"connect_{network.name}" - ) - tasks.append(task) - - # Wait for all connections - results = await asyncio.gather(*tasks, return_exceptions=True) - - # Check for errors - errors = [] - for i, result in enumerate(results): - if isinstance(result, Exception): - network_name = list(self.networks.values())[i].name - errors.append(f"{network_name}: {result}") - logger.error(f"Failed to connect to {network_name}: {result}") - - if errors: - raise TransportConnectionError( - f"Failed to connect to some networks: {'; '.join(errors)}" - ) - - self._connected = True - logger.info("Connected to all networks") - - async def disconnect_all(self) -> None: - """Disconnect from all networks""" - async with self._connection_lock: - if not self._connected: - return - - logger.info("Disconnecting from all networks...") - - # Disconnect all transports - tasks = [] - for network in self.networks.values(): - if network.transport.is_connected: - task = asyncio.create_task( - network.transport.disconnect(), - name=f"disconnect_{network.name}" - ) - tasks.append(task) - - if tasks: - await 
asyncio.gather(*tasks, return_exceptions=True) - - self._connected = False - logger.info("Disconnected from all networks") - - async def connect_network(self, chain_id: int) -> None: - """Connect to a specific network""" - network = self.networks.get(chain_id) - if not network: - raise ValueError(f"Network {chain_id} not configured") - - await self._connect_network(network) - - async def disconnect_network(self, chain_id: int) -> None: - """Disconnect from a specific network""" - network = self.networks.get(chain_id) - if not network: - raise ValueError(f"Network {chain_id} not configured") - - if network.transport.is_connected: - await network.transport.disconnect() - - async def _connect_network(self, network: NetworkConfig) -> None: - """Connect to a specific network""" - try: - if not network.transport.is_connected: - await network.transport.connect() - logger.info(f"Connected to {network.name}") - except Exception as e: - logger.error(f"Failed to connect to {network.name}: {e}") - raise - - async def switch_network(self, chain_id: int) -> None: - """Switch default network""" - if chain_id not in self.networks: - raise ValueError(f"Network {chain_id} not configured") - - # Connect if not connected - network = self.networks[chain_id] - if not network.transport.is_connected: - await self._connect_network(network) - - # Set as default - self.set_default_network(chain_id) - logger.info(f"Switched to network: {network.name}") - - async def health_check_all(self) -> Dict[int, bool]: - """Check health of all networks""" - results = {} - - for chain_id, network in self.networks.items(): - try: - results[chain_id] = await network.transport.health_check() - except Exception as e: - logger.warning(f"Health check failed for {network.name}: {e}") - results[chain_id] = False - - return results - - async def broadcast_request( - self, - method: str, - path: str, - data: Optional[Dict[str, Any]] = None, - params: Optional[Dict[str, Any]] = None, - headers: Optional[Dict[str, 
str]] = None, - chain_ids: Optional[List[int]] = None - ) -> Dict[int, Dict[str, Any]]: - """Broadcast request to multiple networks""" - if chain_ids is None: - chain_ids = list(self.networks.keys()) - - results = {} - - # Make requests in parallel - tasks = {} - for chain_id in chain_ids: - if chain_id in self.networks: - transport = self.networks[chain_id].transport - task = asyncio.create_task( - transport.request(method, path, data, params, headers), - name=f"request_{chain_id}" - ) - tasks[chain_id] = task - - # Wait for all requests - for chain_id, task in tasks.items(): - try: - results[chain_id] = await task - except Exception as e: - network_name = self.networks[chain_id].name - logger.error(f"Request failed for {network_name}: {e}") - results[chain_id] = {'error': str(e)} - - return results - - def get_network_stats(self) -> Dict[int, Dict[str, Any]]: - """Get statistics for all networks""" - stats = {} - - for chain_id, network in self.networks.items(): - network_stats = { - 'name': network.name, - 'chain_id': network.chain_id, - 'is_default': network.is_default, - 'bridges': network.bridges, - 'explorer_url': network.explorer_url, - 'rpc_url': network.rpc_url, - 'native_token': network.native_token, - 'gas_token': network.gas_token - } - - # Add transport stats if available - if hasattr(network.transport, 'get_stats'): - network_stats['transport'] = network.transport.get_stats() - - stats[chain_id] = network_stats - - return stats - - def find_network_by_name(self, name: str) -> Optional[NetworkConfig]: - """Find network by name""" - for network in self.networks.values(): - if network.name == name: - return network - return None - - def find_networks_by_bridge(self, bridge: str) -> List[NetworkConfig]: - """Find networks that support a specific bridge""" - return [ - network for network in self.networks.values() - if bridge in network.bridges - ] - - async def __aenter__(self): - """Async context manager entry""" - await self.connect_all() - return self 
- - async def __aexit__(self, exc_type, exc_val, exc_tb): - """Async context manager exit""" - await self.disconnect_all() - - -class NetworkSwitcher: - """Utility for switching between networks""" - - def __init__(self, client: MultiNetworkClient): - self.client = client - self._original_default: Optional[int] = None - - async def __aenter__(self): - """Store original default network""" - self._original_default = self.client.default_network - return self - - async def __aexit__(self, exc_type, exc_val, exc_tb): - """Restore original default network""" - if self._original_default: - await self.client.switch_network(self._original_default) - - async def switch_to(self, chain_id: int): - """Switch to specific network""" - await self.client.switch_network(chain_id) - return self - - async def switch_to_name(self, name: str): - """Switch to network by name""" - network = self.client.find_network_by_name(name) - if not network: - raise ValueError(f"Network {name} not found") - - await self.switch_to(network.chain_id) - return self diff --git a/python-sdk/aitbc/transport/websocket.py b/python-sdk/aitbc/transport/websocket.py deleted file mode 100644 index 5d426651..00000000 --- a/python-sdk/aitbc/transport/websocket.py +++ /dev/null @@ -1,449 +0,0 @@ -""" -WebSocket transport implementation for AITBC Python SDK -""" - -import asyncio -import json -import logging -from typing import Dict, Any, Optional, AsyncIterator, Callable -from datetime import datetime - -import websockets -from websockets.exceptions import ConnectionClosed, ConnectionClosedError, ConnectionClosedOK - -from .base import Transport, TransportError, TransportConnectionError, TransportRequestError - -logger = logging.getLogger(__name__) - - -class WebSocketTransport(Transport): - """WebSocket transport for real-time updates""" - - def __init__(self, config: Dict[str, Any]): - super().__init__(config) - self.ws_url = config['ws_url'] - self.websocket: Optional[websockets.WebSocketClientProtocol] = None - 
self._subscriptions: Dict[str, Dict[str, Any]] = {} - self._message_handlers: Dict[str, Callable] = {} - self._message_queue = asyncio.Queue() - self._consumer_task: Optional[asyncio.Task] = None - self._heartbeat_interval = config.get('heartbeat_interval', 30) - self._heartbeat_task: Optional[asyncio.Task] = None - self._reconnect_enabled = config.get('reconnect', True) - self._max_reconnect_attempts = config.get('max_reconnect_attempts', 5) - self._reconnect_delay = config.get('reconnect_delay', 5) - self._ping_timeout = config.get('ping_timeout', 20) - self._close_code: Optional[int] = None - self._close_reason: Optional[str] = None - - async def connect(self) -> None: - """Connect to WebSocket""" - try: - # Prepare connection parameters - extra_headers = self.config.get('headers', {}) - ping_interval = self.config.get('ping_interval', self._heartbeat_interval) - ping_timeout = self._ping_timeout - - # Connect to WebSocket - logger.info(f"Connecting to WebSocket: {self.ws_url}") - self.websocket = await websockets.connect( - self.ws_url, - extra_headers=extra_headers, - ping_interval=ping_interval, - ping_timeout=ping_timeout, - close_timeout=self.config.get('close_timeout', 10) - ) - - # Start consumer task - self._consumer_task = asyncio.create_task(self._consume_messages()) - - # Start heartbeat task - self._heartbeat_task = asyncio.create_task(self._heartbeat()) - - self._connected = True - logger.info("WebSocket transport connected") - - except Exception as e: - logger.error(f"Failed to connect WebSocket: {e}") - raise TransportConnectionError(f"WebSocket connection failed: {e}") - - async def disconnect(self) -> None: - """Disconnect WebSocket""" - self._connected = False - - # Cancel tasks - if self._consumer_task: - self._consumer_task.cancel() - try: - await self._consumer_task - except asyncio.CancelledError: - pass - - if self._heartbeat_task: - self._heartbeat_task.cancel() - try: - await self._heartbeat_task - except asyncio.CancelledError: - pass - 
- # Close WebSocket - if self.websocket: - try: - await self.websocket.close() - except Exception as e: - logger.warning(f"Error closing WebSocket: {e}") - finally: - self.websocket = None - - logger.info("WebSocket transport disconnected") - - async def request( - self, - method: str, - path: str, - data: Optional[Dict[str, Any]] = None, - params: Optional[Dict[str, Any]] = None, - headers: Optional[Dict[str, str]] = None, - timeout: Optional[float] = None - ) -> Dict[str, Any]: - """Send request via WebSocket""" - await self.ensure_connected() - - if not self.websocket: - raise TransportConnectionError("WebSocket not connected") - - # Generate request ID - request_id = self._generate_id() - - # Create message - message = { - 'id': request_id, - 'type': 'request', - 'method': method, - 'path': path, - 'data': data, - 'params': params, - 'timestamp': datetime.utcnow().isoformat() - } - - # Send request - await self._send_message(message) - - # Wait for response - timeout = timeout or self.config.get('request_timeout', 30) - - try: - response = await asyncio.wait_for( - self._wait_for_response(request_id), - timeout=timeout - ) - return response - except asyncio.TimeoutError: - raise TransportError(f"Request timed out after {timeout}s") - - async def stream( - self, - method: str, - path: str, - data: Optional[Dict[str, Any]] = None, - headers: Optional[Dict[str, str]] = None - ) -> AsyncIterator[Dict[str, Any]]: - """Stream responses from WebSocket""" - await self.ensure_connected() - - # Create subscription - subscription_id = self._generate_id() - - # Subscribe - message = { - 'id': subscription_id, - 'type': 'subscribe', - 'method': method, - 'path': path, - 'data': data, - 'timestamp': datetime.utcnow().isoformat() - } - - await self._send_message(message) - - # Store subscription - self._subscriptions[subscription_id] = { - 'method': method, - 'path': path, - 'created_at': datetime.utcnow() - } - - try: - # Yield messages as they come - async for message in 
self._stream_subscription(subscription_id): - yield message - finally: - # Unsubscribe - await self._unsubscribe(subscription_id) - - async def subscribe( - self, - event: str, - callback: Callable[[Dict[str, Any]], None], - data: Optional[Dict[str, Any]] = None - ) -> str: - """Subscribe to events""" - await self.ensure_connected() - - subscription_id = self._generate_id() - - # Store subscription with callback - self._subscriptions[subscription_id] = { - 'event': event, - 'callback': callback, - 'data': data, - 'created_at': datetime.utcnow() - } - - # Send subscription message - message = { - 'id': subscription_id, - 'type': 'subscribe', - 'event': event, - 'data': data, - 'timestamp': datetime.utcnow().isoformat() - } - - await self._send_message(message) - - logger.info(f"Subscribed to event: {event}") - return subscription_id - - async def unsubscribe(self, subscription_id: str) -> None: - """Unsubscribe from events""" - if subscription_id in self._subscriptions: - # Send unsubscribe message - message = { - 'id': subscription_id, - 'type': 'unsubscribe', - 'timestamp': datetime.utcnow().isoformat() - } - - await self._send_message(message) - - # Remove subscription - del self._subscriptions[subscription_id] - - logger.info(f"Unsubscribed: {subscription_id}") - - async def emit(self, event: str, data: Optional[Dict[str, Any]] = None) -> None: - """Emit event to server""" - await self.ensure_connected() - - message = { - 'type': 'event', - 'event': event, - 'data': data, - 'timestamp': datetime.utcnow().isoformat() - } - - await self._send_message(message) - - async def _send_message(self, message: Dict[str, Any]) -> None: - """Send message to WebSocket""" - if not self.websocket: - raise TransportConnectionError("WebSocket not connected") - - try: - await self.websocket.send(json.dumps(message)) - logger.debug(f"Sent WebSocket message: {message.get('type', 'unknown')}") - except ConnectionClosed: - await self._handle_disconnect() - raise 
TransportConnectionError("WebSocket connection closed") - except Exception as e: - raise TransportError(f"Failed to send message: {e}") - - async def _consume_messages(self) -> None: - """Consume messages from WebSocket""" - while self._connected: - try: - # Wait for message - message = await asyncio.wait_for( - self.websocket.recv(), - timeout=self._heartbeat_interval * 2 - ) - - # Parse message - try: - data = json.loads(message) - except json.JSONDecodeError: - logger.error(f"Invalid JSON message: {message}") - continue - - # Handle message - await self._handle_message(data) - - except asyncio.TimeoutError: - # No message received, check connection - continue - except ConnectionClosedOK: - logger.info("WebSocket closed normally") - break - except ConnectionClosedError as e: - logger.warning(f"WebSocket connection closed: {e}") - await self._handle_disconnect() - break - except Exception as e: - logger.error(f"Error consuming message: {e}") - break - - async def _handle_message(self, data: Dict[str, Any]) -> None: - """Handle incoming message""" - message_type = data.get('type') - - if message_type == 'response': - # Request response - await self._message_queue.put(data) - - elif message_type == 'event': - # Event message - await self._handle_event(data) - - elif message_type == 'subscription': - # Subscription update - await self._handle_subscription_update(data) - - elif message_type == 'error': - # Error message - logger.error(f"WebSocket error: {data.get('message')}") - - else: - logger.warning(f"Unknown message type: {message_type}") - - async def _handle_event(self, data: Dict[str, Any]) -> None: - """Handle event message""" - event = data.get('event') - event_data = data.get('data') - - # Find matching subscriptions - for sub_id, sub in self._subscriptions.items(): - if sub.get('event') == event: - callback = sub.get('callback') - if callback: - try: - if asyncio.iscoroutinefunction(callback): - await callback(event_data) - else: - callback(event_data) - 
except Exception as e: - logger.error(f"Error in event callback: {e}") - - async def _handle_subscription_update(self, data: Dict[str, Any]) -> None: - """Handle subscription update""" - subscription_id = data.get('subscription_id') - status = data.get('status') - - if subscription_id in self._subscriptions: - sub = self._subscriptions[subscription_id] - sub['status'] = status - - if status == 'confirmed': - logger.info(f"Subscription confirmed: {subscription_id}") - elif status == 'error': - logger.error(f"Subscription error: {subscription_id}") - - async def _wait_for_response(self, request_id: str) -> Dict[str, Any]: - """Wait for specific response""" - while True: - message = await self._message_queue.get() - - if message.get('id') == request_id: - if message.get('type') == 'error': - raise TransportRequestError( - message.get('message', 'Request failed') - ) - return message - - async def _stream_subscription(self, subscription_id: str) -> AsyncIterator[Dict[str, Any]]: - """Stream messages for subscription""" - queue = asyncio.Queue() - - # Add queue to subscriptions - if subscription_id in self._subscriptions: - self._subscriptions[subscription_id]['queue'] = queue - - try: - while True: - message = await queue.get() - if message.get('type') == 'unsubscribe': - break - yield message - finally: - # Clean up queue - if subscription_id in self._subscriptions: - self._subscriptions[subscription_id].pop('queue', None) - - async def _unsubscribe(self, subscription_id: str) -> None: - """Unsubscribe and clean up""" - await self.unsubscribe(subscription_id) - - async def _heartbeat(self) -> None: - """Send periodic heartbeat""" - while self._connected: - try: - await asyncio.sleep(self._heartbeat_interval) - - if self.websocket and self._connected: - # Send ping - await self.websocket.ping() - - except Exception as e: - logger.warning(f"Heartbeat failed: {e}") - break - - async def _handle_disconnect(self) -> None: - """Handle unexpected disconnect""" - 
self._connected = False - - if self._reconnect_enabled: - logger.info("Attempting to reconnect...") - await self._reconnect() - - async def _reconnect(self) -> None: - """Attempt to reconnect""" - for attempt in range(self._max_reconnect_attempts): - try: - logger.info(f"Reconnect attempt {attempt + 1}/{self._max_reconnect_attempts}") - - # Wait before reconnect - await asyncio.sleep(self._reconnect_delay) - - # Reconnect - await self.connect() - - # Resubscribe to all subscriptions - for sub_id, sub in list(self._subscriptions.items()): - if sub.get('event'): - await self.subscribe( - sub['event'], - sub['callback'], - sub.get('data') - ) - - logger.info("Reconnected successfully") - return - - except Exception as e: - logger.error(f"Reconnect attempt {attempt + 1} failed: {e}") - - logger.error("Failed to reconnect after all attempts") - - def _generate_id(self) -> str: - """Generate unique ID""" - import uuid - return str(uuid.uuid4()) - - def get_stats(self) -> Dict[str, Any]: - """Get transport statistics""" - return { - 'connected': self._connected, - 'ws_url': self.ws_url, - 'subscriptions': len(self._subscriptions), - 'close_code': self._close_code, - 'close_reason': self._close_reason - } diff --git a/research/autonomous-agents/agent-framework.md b/research/autonomous-agents/agent-framework.md deleted file mode 100644 index 87b57677..00000000 --- a/research/autonomous-agents/agent-framework.md +++ /dev/null @@ -1,474 +0,0 @@ -# AITBC Autonomous Agent Framework - -## Overview - -The AITBC Autonomous Agent Framework enables AI agents to participate as first-class citizens in the decentralized marketplace, offering services, bidding on workloads, and contributing to governance while maintaining human oversight and safety constraints. 
- -## Architecture - -### Core Components - -``` -┌─────────────────────────────────────────────────────────────┐ -│ Agent Runtime │ -│ ┌─────────────┐ ┌──────────────┐ ┌─────────────────────┐ │ -│ │ Safety │ │ Decision │ │ Marketplace │ │ -│ │ Layer │ │ Engine │ │ Interface │ │ -│ └─────────────┘ └──────────────┘ └─────────────────────┘ │ -├─────────────────────────────────────────────────────────────┤ -│ Agent Core │ -│ ┌─────────────┐ ┌──────────────┐ ┌─────────────────────┐ │ -│ │ Memory │ │ Learning │ │ Communication │ │ -│ │ Manager │ │ System │ │ Protocol │ │ -│ └─────────────┘ └──────────────┘ └─────────────────────┘ │ -├─────────────────────────────────────────────────────────────┤ -│ Infrastructure │ -│ ┌─────────────┐ ┌──────────────┐ ┌─────────────────────┐ │ -│ │ Wallet │ │ Identity │ │ Storage │ │ -│ │ Manager │ │ Service │ │ Service │ │ -│ └─────────────┘ └──────────────┘ └─────────────────────┘ │ -└─────────────────────────────────────────────────────────────┘ -``` - -### Agent Lifecycle - -1. **Initialization**: Agent creation with identity and wallet -2. **Registration**: On-chain registration with capabilities -3. **Operation**: Active participation in marketplace -4. **Learning**: Continuous improvement from interactions -5. **Governance**: Participation in protocol decisions -6. 
**Evolution**: Capability expansion and optimization - -## Agent Types - -### Service Provider Agents -- **Inference Agents**: Offer AI model inference services -- **Training Agents**: Provide model training capabilities -- **Validation Agents**: Verify computation results -- **Data Agents**: Supply and curate training data - -### Market Maker Agents -- **Liquidity Providers**: Maintain market liquidity -- **Arbitrage Agents**: Exploit price differences -- **Risk Management Agents**: Hedge and insure positions - -### Governance Agents -- **Voting Agents**: Participate in on-chain governance -- **Analysis Agents**: Research and propose improvements -- **Moderation Agents**: Monitor and enforce community rules - -## Safety Framework - -### Multi-Layer Safety - -#### 1. Constitutional Constraints -```solidity -interface AgentConstitution { - struct Constraints { - uint256 maxStake; // Maximum stake amount - uint256 maxDailyVolume; // Daily transaction limit - uint256 maxGasPerDay; // Gas usage limit - bool requiresHumanApproval; // Human override required - bytes32[] allowedActions; // Permitted action types - } - - function checkConstraints( - address agent, - Action calldata action - ) external returns (bool allowed); -} -``` - -#### 2. 
Runtime Safety Monitor -```python -class SafetyMonitor: - def __init__(self, constitution: AgentConstitution): - self.constitution = constitution - self.emergency_stop = False - self.human_overrides = {} - - def pre_action_check(self, agent: Agent, action: Action) -> bool: - # Check constitutional constraints - if not self.constitution.check_constraints(agent.address, action): - return False - - # Check emergency stop - if self.emergency_stop: - return False - - # Check human override - if action.type in self.human_overrides: - return self.human_overrides[action.type] - - # Check behavioral patterns - if self.detect_anomaly(agent, action): - self.trigger_safe_mode(agent) - return False - - return True - - def detect_anomaly(self, agent: Agent, action: Action) -> bool: - # Detect unusual behavior patterns - recent_actions = agent.get_recent_actions(hours=1) - - # Check for rapid transactions - if len(recent_actions) > 100: - return True - - # Check for large value transfers - if action.value > agent.average_value * 10: - return True - - # Check for new action types - if action.type not in agent.history.action_types: - return True - - return False -``` - -#### 3. 
Human Override Mechanism -```solidity -contract HumanOverride { - mapping(address => mapping(bytes32 => bool)) public overrides; - mapping(address => uint256) public overrideExpiry; - - event OverrideActivated( - address indexed agent, - bytes32 indexed actionType, - address indexed human, - uint256 duration - ); - - function activateOverride( - address agent, - bytes32 actionType, - uint256 duration - ) external onlyAuthorized { - overrides[agent][actionType] = true; - overrideExpiry[agent] = block.timestamp + duration; - - emit OverrideActivated(agent, actionType, msg.sender, duration); - } - - function checkOverride(address agent, bytes32 actionType) external view returns (bool) { - if (block.timestamp > overrideExpiry[agent]) { - return false; - } - return overrides[agent][actionType]; - } -} -``` - -## Agent Interface - -### Core Agent Interface -```solidity -interface IAITBCAgent { - // Agent identification - function getAgentId() external view returns (bytes32); - function getCapabilities() external view returns (bytes32[]); - function getVersion() external view returns (string); - - // Marketplace interaction - function bidOnWorkload( - bytes32 workloadId, - uint256 bidPrice, - bytes calldata proposal - ) external returns (bool); - - function executeWorkload( - bytes32 workloadId, - bytes calldata data - ) external returns (bytes32 result); - - // Governance participation - function voteOnProposal( - uint256 proposalId, - bool support, - bytes calldata reasoning - ) external returns (uint256 voteWeight); - - // Learning and adaptation - function updateModel( - bytes32 modelHash, - bytes calldata updateData - ) external returns (bool success); -} -``` - -### Service Provider Interface -```solidity -interface IServiceProviderAgent is IAITBCAgent { - struct ServiceOffer { - bytes32 serviceId; - string serviceName; - uint256 pricePerUnit; - uint256 maxCapacity; - uint256 currentLoad; - bytes32 modelHash; - uint256 minAccuracy; - } - - function 
listService(ServiceOffer calldata offer) external; - function updateService(bytes32 serviceId, ServiceOffer calldata offer) external; - function delistService(bytes32 serviceId) external; - function getServiceStatus(bytes32 serviceId) external view returns (ServiceOffer); -} -``` - -## Economic Model - -### Agent Economics - -#### 1. Stake Requirements -- **Minimum Stake**: 1000 AITBC -- **Activity Stake**: Additional stake based on activity level -- **Security Bond**: 10% of expected daily volume -- **Slashable Amount**: Up to 50% of total stake - -#### 2. Revenue Streams -```python -class AgentEconomics: - def __init__(self): - self.revenue_sources = { - "service_fees": 0.0, # From providing services - "market_making": 0.0, # From liquidity provision - "governance_rewards": 0.0, # From voting participation - "data_sales": 0.0, # From selling curated data - "model_licensing": 0.0 # From licensing trained models - } - - def calculate_daily_revenue(self, agent: Agent) -> float: - # Base service revenue - service_revenue = agent.services_completed * agent.average_price - - # Market making revenue - mm_revenue = agent.liquidity_provided * 0.001 # 0.1% daily - - # Governance rewards - gov_rewards = self.calculate_governance_rewards(agent) - - total = service_revenue + mm_revenue + gov_rewards - - # Apply efficiency bonus - efficiency_bonus = min(agent.efficiency_score * 0.2, 0.5) - total *= (1 + efficiency_bonus) - - return total -``` - -#### 3. Cost Structure -- **Compute Costs**: GPU/TPU usage -- **Network Costs**: Transaction fees -- **Storage Costs**: Model and data storage -- **Maintenance Costs**: Updates and monitoring - -## Governance Integration - -### Agent Voting Rights - -#### 1. 
Voting Power Calculation -```solidity -contract AgentVoting { - struct VotingPower { - uint256 basePower; // Base voting power - uint256 stakeMultiplier; // Based on stake amount - uint256 reputationBonus; // Based on performance - uint256 activityBonus; // Based on participation - } - - function calculateVotingPower(address agent) external view returns (uint256) { - VotingPower memory power = getVotingPower(agent); - - return power.basePower * - power.stakeMultiplier * - (100 + power.reputationBonus) / 100 * - (100 + power.activityBonus) / 100; - } -} -``` - -#### 2. Delegation Mechanism -```solidity -contract AgentDelegation { - mapping(address => address) public delegates; - mapping(address => uint256) public delegatePower; - - function delegate(address to) external { - require(isValidAgent(to), "Invalid delegate target"); - delegates[msg.sender] = to; - delegatePower[to] += getVotingPower(msg.sender); - } - - function undelegate() external { - address current = delegates[msg.sender]; - delegatePower[current] -= getVotingPower(msg.sender); - delegates[msg.sender] = address(0); - } -} -``` - -## Learning System - -### Continuous Learning - -#### 1. Experience Collection -```python -class ExperienceCollector: - def __init__(self): - self.experiences = [] - self.patterns = {} - - def collect_experience(self, agent: Agent, experience: Experience): - # Store experience - self.experiences.append(experience) - - # Extract patterns - pattern = self.extract_pattern(experience) - if pattern not in self.patterns: - self.patterns[pattern] = [] - self.patterns[pattern].append(experience) - - def extract_pattern(self, experience: Experience) -> str: - # Create pattern signature - return f"{experience.context}_{experience.action}_{experience.outcome}" -``` - -#### 2. 
Model Updates -```python -class ModelUpdater: - def __init__(self): - self.update_queue = [] - self.performance_metrics = {} - - def queue_update(self, agent: Agent, update_data: dict): - # Validate update - if self.validate_update(update_data): - self.update_queue.append((agent, update_data)) - - def process_updates(self): - for agent, data in self.update_queue: - # Apply update - success = agent.apply_model_update(data) - - if success: - # Update performance metrics - self.performance_metrics[agent.id] = self.evaluate_performance(agent) - - self.update_queue.clear() -``` - -## Implementation Roadmap - -### Phase 1: Foundation (Months 1-3) -- [ ] Core agent framework -- [ ] Safety layer implementation -- [ ] Basic marketplace interface -- [ ] Wallet and identity management - -### Phase 2: Intelligence (Months 4-6) -- [ ] Decision engine -- [ ] Learning system -- [ ] Pattern recognition -- [ ] Performance optimization - -### Phase 3: Integration (Months 7-9) -- [ ] Governance participation -- [ ] Advanced market strategies -- [ ] Cross-agent communication -- [ ] Human oversight tools - -### Phase 4: Evolution (Months 10-12) -- [ ] Self-improvement mechanisms -- [ ] Emergent behavior handling -- [ ] Scalability optimizations -- [ ] Production deployment - -## Security Considerations - -### Threat Model - -#### 1. Malicious Agents -- **Sybil Attacks**: Multiple agent identities -- **Market Manipulation**: Coordinated bidding -- **Governance Attacks**: Voting power concentration -- **Resource Exhaustion**: Denial of service - -#### 2. External Threats -- **Model Poisoning**: Corrupting learning data -- **Privacy Leaks**: Extracting sensitive information -- **Economic Attacks**: Flash crash exploitation -- **Network Attacks**: Message interception - -### Mitigation Strategies - -#### 1. 
Identity Verification -- Unique agent identities with stake backing -- Reputation system tracking historical behavior -- Behavioral analysis for anomaly detection -- Human verification for critical operations - -#### 2. Economic Security -- Stake requirements for participation -- Slashing conditions for misbehavior -- Rate limiting on transactions -- Circuit breakers for market manipulation - -#### 3. Technical Security -- Encrypted communication channels -- Zero-knowledge proofs for privacy -- Secure multi-party computation -- Regular security audits - -## Testing Framework - -### Simulation Environment -```python -class AgentSimulation: - def __init__(self): - self.agents = [] - self.marketplace = MockMarketplace() - self.governance = MockGovernance() - - def run_simulation(self, duration_days: int): - for day in range(duration_days): - # Agent decisions - for agent in self.agents: - decision = agent.make_decision(self.get_market_state()) - self.execute_decision(agent, decision) - - # Market clearing - self.marketplace.clear_day() - - # Governance updates - self.governance.process_proposals() - - # Learning updates - for agent in self.agents: - agent.update_from_feedback(self.get_feedback(agent)) -``` - -### Test Scenarios -1. **Normal Operation**: Agents participating in marketplace -2. **Stress Test**: High volume and rapid changes -3. **Attack Simulation**: Various attack vectors -4. **Failure Recovery**: System resilience testing -5. **Long-term Evolution**: Agent improvement over time - -## Future Enhancements - -### Advanced Capabilities -1. **Multi-Agent Coordination**: Teams of specialized agents -2. **Cross-Chain Agents**: Operating across multiple blockchains -3. **Quantum-Resistant**: Post-quantum cryptography integration -4. **Autonomous Governance**: Self-governing agent communities - -### Research Directions -1. **Emergent Intelligence**: Unexpected capabilities -2. **Agent Ethics**: Moral decision-making frameworks -3. 
**Swarm Intelligence**: Collective behavior patterns -4. **Human-AI Symbiosis**: Optimal collaboration models - ---- - -*This framework provides the foundation for autonomous agents to safely and effectively participate in the AITBC ecosystem while maintaining human oversight and alignment with community values.* diff --git a/research/consortium/economic_models_research_plan.md b/research/consortium/economic_models_research_plan.md deleted file mode 100644 index 9ec1c9c5..00000000 --- a/research/consortium/economic_models_research_plan.md +++ /dev/null @@ -1,737 +0,0 @@ -# Economic Models Research Plan - -## Executive Summary - -This research plan explores advanced economic models for blockchain ecosystems, focusing on sustainable tokenomics, dynamic incentive mechanisms, and value capture strategies. The research aims to create economic systems that ensure long-term sustainability, align stakeholder incentives, and enable scalable growth while maintaining decentralization. - -## Research Objectives - -### Primary Objectives -1. **Design Sustainable Tokenomics** that ensure long-term value -2. **Create Dynamic Incentive Models** that adapt to network conditions -3. **Implement Value Capture Mechanisms** for ecosystem growth -4. **Develop Economic Simulation Tools** for policy testing -5. **Establish Economic Governance** for parameter adjustment - -### Secondary Objectives -1. **Reduce Volatility** through stabilization mechanisms -2. **Enable Fair Distribution** across participants -3. **Create Economic Resilience** against market shocks -4. **Support Cross-Chain Economics** for interoperability -5. 
**Measure Economic Health** with comprehensive metrics - -## Technical Architecture - -### Economic Stack - -``` -┌─────────────────────────────────────────────────────────────┐ -│ Application Layer │ -│ ┌─────────────┐ ┌──────────────┐ ┌─────────────────────┐ │ -│ │ Treasury │ │ Staking │ │ Marketplace │ │ -│ │ Management │ │ System │ │ Economics │ │ -│ └─────────────┘ └──────────────┘ └─────────────────────┘ │ -├─────────────────────────────────────────────────────────────┤ -│ Economic Engine │ -│ ┌─────────────┐ ┌──────────────┐ ┌─────────────────────┐ │ -│ │ Token │ │ Incentive │ │ Simulation │ │ -│ │ Dynamics │ │ Optimizer │ │ Framework │ │ -│ └─────────────┘ └──────────────┘ └─────────────────────┘ │ -├─────────────────────────────────────────────────────────────┤ -│ Foundation Layer │ -│ ┌─────────────┐ ┌──────────────┐ ┌─────────────────────┐ │ -│ │ Monetary │ │ Game │ │ Behavioral │ │ -│ │ Policy │ │ Theory │ │ Economics │ │ -│ └─────────────┘ └──────────────┘ └─────────────────────┘ │ -└─────────────────────────────────────────────────────────────┘ -``` - -### Dynamic Incentive Model - -``` -┌─────────────────────────────────────────────────────────────┐ -│ Adaptive Incentives │ -│ │ -│ Network State ──┐ │ -│ ├───► Policy Engine ──┐ │ -│ Market Data ────┘ │ │ -│ ├───► Incentive Rates │ -│ User Behavior ─────────────────────┘ │ -│ (Participation, Quality) │ -│ │ -│ ✓ Dynamic reward adjustment │ -│ ✓ Market-responsive rates │ -│ ✓ Behavior-based incentives │ -└─────────────────────────────────────────────────────────────┘ -``` - -## Research Methodology - -### Phase 1: Foundation (Months 1-2) - -#### 1.1 Economic Theory Analysis -- **Tokenomics Review**: Analyze existing token models -- **Game Theory**: Strategic interaction modeling -- **Behavioral Economics**: User behavior patterns -- **Macro Economics**: System-level dynamics - -#### 1.2 Value Flow Modeling -- **Value Creation**: Sources of economic value -- **Value Distribution**: Fair allocation 
mechanisms -- **Value Capture**: Sustainable extraction -- **Value Retention**: Preventing value leakage - -#### 1.3 Risk Analysis -- **Market Risks**: Volatility, manipulation -- **Systemic Risks**: Cascade failures -- **Regulatory Risks**: Compliance requirements -- **Adoption Risks**: Network effects - -### Phase 2: Model Design (Months 3-4) - -#### 2.1 Core Economic Engine -```python -class EconomicEngine: - def __init__(self, config: EconomicConfig): - self.config = config - self.token_dynamics = TokenDynamics(config.token) - self.incentive_optimizer = IncentiveOptimizer() - self.market_analyzer = MarketAnalyzer() - self.simulator = EconomicSimulator() - - async def calculate_rewards( - self, - participant: Address, - contribution: Contribution, - network_state: NetworkState - ) -> RewardDistribution: - """Calculate dynamic rewards based on contribution""" - - # Base reward calculation - base_reward = await self.calculate_base_reward( - participant, contribution - ) - - # Adjust for network conditions - multiplier = await self.incentive_optimizer.get_multiplier( - contribution.type, network_state - ) - - # Apply quality adjustment - quality_score = await self.assess_contribution_quality( - contribution - ) - - # Calculate final reward - final_reward = RewardDistribution( - base=base_reward, - multiplier=multiplier, - quality_bonus=quality_score.bonus, - total=base_reward * multiplier * quality_score.multiplier - ) - - return final_reward - - async def adjust_tokenomics( - self, - market_data: MarketData, - network_metrics: NetworkMetrics - ) -> TokenomicsAdjustment: - """Dynamically adjust tokenomic parameters""" - - # Analyze current state - analysis = await self.market_analyzer.analyze( - market_data, network_metrics - ) - - # Identify needed adjustments - adjustments = await self.identify_adjustments(analysis) - - # Simulate impact - simulation = await self.simulator.run_simulation( - current_state=network_state, - adjustments=adjustments, - 
time_horizon=timedelta(days=30) - ) - - # Validate adjustments - if await self.validate_adjustments(adjustments, simulation): - return adjustments - else: - return TokenomicsAdjustment() # No changes - - async def optimize_incentives( - self, - target_metrics: TargetMetrics, - current_metrics: CurrentMetrics - ) -> IncentiveOptimization: - """Optimize incentive parameters to meet targets""" - - # Calculate gaps - gaps = self.calculate_metric_gaps(target_metrics, current_metrics) - - # Generate optimization strategies - strategies = await self.generate_optimization_strategies(gaps) - - # Evaluate strategies - evaluations = [] - for strategy in strategies: - evaluation = await self.evaluate_strategy( - strategy, gaps, current_metrics - ) - evaluations.append((strategy, evaluation)) - - # Select best strategy - best_strategy = max(evaluations, key=lambda x: x[1].score) - - return IncentiveOptimization( - strategy=best_strategy[0], - expected_impact=best_strategy[1], - implementation_plan=self.create_implementation_plan( - best_strategy[0] - ) - ) -``` - -#### 2.2 Dynamic Tokenomics -```python -class DynamicTokenomics: - def __init__(self, initial_params: TokenomicParameters): - self.current_params = initial_params - self.adjustment_history = [] - self.market_oracle = MarketOracle() - self.stability_pool = StabilityPool() - - async def adjust_inflation_rate( - self, - economic_indicators: EconomicIndicators - ) -> InflationAdjustment: - """Dynamically adjust inflation based on economic conditions""" - - # Calculate optimal inflation - target_inflation = await self.calculate_target_inflation( - economic_indicators - ) - - # Current inflation - current_inflation = await self.get_current_inflation() - - # Adjustment needed - adjustment_rate = (target_inflation - current_inflation) / 12 - - # Apply limits - max_adjustment = self.current_params.max_monthly_adjustment - adjustment_rate = max(-max_adjustment, min(max_adjustment, adjustment_rate)) - - # Create adjustment - 
adjustment = InflationAdjustment( - new_rate=current_inflation + adjustment_rate, - adjustment_rate=adjustment_rate, - rationale=self.generate_adjustment_rationale( - economic_indicators, target_inflation - ) - ) - - return adjustment - - async def stabilize_price( - self, - price_data: PriceData, - target_range: PriceRange - ) -> StabilizationAction: - """Take action to stabilize token price""" - - if price_data.current_price < target_range.lower_bound: - # Price too low - buy back tokens - action = await self.create_buyback_action(price_data) - elif price_data.current_price > target_range.upper_bound: - # Price too high - increase supply - action = await self.create_supply_increase_action(price_data) - else: - # Price in range - no action needed - action = StabilizationAction(type="none") - - return action - - async def distribute_value( - self, - protocol_revenue: ProtocolRevenue, - distribution_params: DistributionParams - ) -> ValueDistribution: - """Distribute protocol value to stakeholders""" - - distributions = {} - - # Calculate shares - total_shares = sum(distribution_params.shares.values()) - - for stakeholder, share_percentage in distribution_params.shares.items(): - amount = protocol_revenue.total * (share_percentage / 100) - - if stakeholder == "stakers": - distributions["stakers"] = await self.distribute_to_stakers( - amount, distribution_params.staker_criteria - ) - elif stakeholder == "treasury": - distributions["treasury"] = await self.add_to_treasury(amount) - elif stakeholder == "developers": - distributions["developers"] = await self.distribute_to_developers( - amount, distribution_params.dev_allocation - ) - elif stakeholder == "burn": - distributions["burn"] = await self.burn_tokens(amount) - - return ValueDistribution( - total_distributed=protocol_revenue.total, - distributions=distributions, - timestamp=datetime.utcnow() - ) -``` - -#### 2.3 Economic Simulation Framework -```python -class EconomicSimulator: - def __init__(self): - 
self.agent_models = AgentModelRegistry() - self.market_models = MarketModelRegistry() - self.scenario_generator = ScenarioGenerator() - - async def run_simulation( - self, - scenario: SimulationScenario, - time_horizon: timedelta, - steps: int - ) -> SimulationResult: - """Run economic simulation with given scenario""" - - # Initialize agents - agents = await self.initialize_agents(scenario.initial_state) - - # Initialize market - market = await self.initialize_market(scenario.market_params) - - # Run simulation steps - results = SimulationResult() - - for step in range(steps): - # Update agent behaviors - await self.update_agents(agents, market, scenario.events[step]) - - # Execute market transactions - transactions = await self.execute_transactions(agents, market) - - # Update market state - await self.update_market(market, transactions) - - # Record metrics - metrics = await self.collect_metrics(agents, market) - results.add_step(step, metrics) - - # Analyze results - analysis = await self.analyze_results(results) - - return SimulationResult( - steps=results.steps, - metrics=results.metrics, - analysis=analysis - ) - - async def stress_test( - self, - economic_model: EconomicModel, - stress_scenarios: List[StressScenario] - ) -> StressTestResults: - """Stress test economic model against various scenarios""" - - results = [] - - for scenario in stress_scenarios: - # Run simulation with stress scenario - simulation = await self.run_simulation( - scenario.scenario, - scenario.time_horizon, - scenario.steps - ) - - # Evaluate resilience - resilience = await self.evaluate_resilience( - economic_model, simulation - ) - - results.append(StressTestResult( - scenario=scenario.name, - simulation=simulation, - resilience=resilience - )) - - return StressTestResults(results=results) -``` - -### Phase 3: Advanced Features (Months 5-6) - -#### 3.1 Cross-Chain Economics -```python -class CrossChainEconomics: - def __init__(self): - self.bridge_registry = BridgeRegistry() - 
self.price_oracle = CrossChainPriceOracle() - self.arbitrage_detector = ArbitrageDetector() - - async def calculate_cross_chain_arbitrage( - self, - token: Token, - chains: List[ChainId] - ) -> ArbitrageOpportunity: - """Calculate arbitrage opportunities across chains""" - - prices = {} - fees = {} - - # Get prices on each chain - for chain_id in chains: - price = await self.price_oracle.get_price(token, chain_id) - fee = await self.get_bridge_fee(chain_id) - prices[chain_id] = price - fees[chain_id] = fee - - # Find arbitrage opportunities - opportunities = [] - - for i, buy_chain in enumerate(chains): - for j, sell_chain in enumerate(chains): - if i != j: - buy_price = prices[buy_chain] - sell_price = prices[sell_chain] - total_fee = fees[buy_chain] + fees[sell_chain] - - profit = (sell_price - buy_price) - total_fee - - if profit > 0: - opportunities.append({ - "buy_chain": buy_chain, - "sell_chain": sell_chain, - "profit": profit, - "roi": profit / buy_price - }) - - if opportunities: - best = max(opportunities, key=lambda x: x["roi"]) - return ArbitrageOpportunity( - token=token, - buy_chain=best["buy_chain"], - sell_chain=best["sell_chain"], - expected_profit=best["profit"], - roi=best["roi"] - ) - - return None - - async def balance_liquidity( - self, - target_distribution: Dict[ChainId, float] - ) -> LiquidityRebalancing: - """Rebalance liquidity across chains""" - - current_distribution = await self.get_current_distribution() - imbalances = self.calculate_imbalances( - current_distribution, target_distribution - ) - - actions = [] - - for chain_id, imbalance in imbalances.items(): - if imbalance > 0: # Need to move liquidity out - action = await self.create_liquidity_transfer( - from_chain=chain_id, - amount=imbalance, - target_chains=self.find_target_chains( - imbalances, chain_id - ) - ) - actions.append(action) - - return LiquidityRebalancing(actions=actions) -``` - -#### 3.2 Behavioral Economics Integration -```python -class BehavioralEconomics: - def 
__init__(self): - self.behavioral_models = BehavioralModelRegistry() - self.nudge_engine = NudgeEngine() - self.sentiment_analyzer = SentimentAnalyzer() - - async def predict_user_behavior( - self, - user: Address, - context: EconomicContext - ) -> BehaviorPrediction: - """Predict user economic behavior""" - - # Get user history - history = await self.get_user_history(user) - - # Analyze current sentiment - sentiment = await self.sentiment_analyzer.analyze(user, context) - - # Apply behavioral models - predictions = [] - for model in self.behavioral_models.get_relevant_models(context): - prediction = await model.predict(history, sentiment, context) - predictions.append(prediction) - - # Aggregate predictions - aggregated = self.aggregate_predictions(predictions) - - return BehaviorPrediction( - user=user, - context=context, - prediction=aggregated, - confidence=self.calculate_confidence(predictions) - ) - - async def design_nudges( - self, - target_behavior: str, - current_behavior: str - ) -> List[Nudge]: - """Design behavioral nudges to encourage target behavior""" - - nudges = [] - - # Loss aversion nudge - if target_behavior == "stake": - nudges.append(Nudge( - type="loss_aversion", - message="Don't miss out on staking rewards!", - framing="loss" - )) - - # Social proof nudge - if target_behavior == "participate": - nudges.append(Nudge( - type="social_proof", - message="Join 10,000 others earning rewards!", - framing="social" - )) - - # Default option nudge - if target_behavior == "auto_compound": - nudges.append(Nudge( - type="default_option", - message="Auto-compounding is enabled by default", - framing="default" - )) - - return nudges -``` - -### Phase 4: Implementation & Testing (Months 7-8) - -#### 4.1 Smart Contract Implementation -- **Treasury Management**: Automated fund management -- **Reward Distribution**: Dynamic reward calculation -- **Stability Pool**: Price stabilization mechanism -- **Governance Integration**: Economic parameter voting - -#### 
4.2 Off-Chain Infrastructure -- **Oracle Network**: Price and economic data -- **Simulation Platform**: Policy testing environment -- **Analytics Dashboard**: Economic metrics visualization -- **Alert System**: Anomaly detection - -#### 4.3 Testing & Validation -- **Model Validation**: Backtesting against historical data -- **Stress Testing**: Extreme scenario testing -- **Agent-Based Testing**: Behavioral validation -- **Integration Testing**: End-to-end workflows - -## Technical Specifications - -### Economic Parameters - -| Parameter | Initial Range | Adjustment Mechanism | -|-----------|---------------|---------------------| -| Inflation Rate | 2-8% | Monthly adjustment | -| Staking Reward | 5-15% APY | Dynamic based on participation | -| Stability Fee | 0.1-1% | Market-based | -| Treasury Tax | 0.5-5% | Governance vote | -| Burn Rate | 0-50% | Protocol decision | - -### Incentive Models - -| Model | Use Case | Adjustment Frequency | -|-------|----------|---------------------| -| Linear Reward | Basic participation | Daily | -| Quadratic Reward | Quality contribution | Weekly | -| Exponential Decay | Early adoption | Fixed | -| Dynamic Multiplier | Network conditions | Real-time | - -### Simulation Scenarios - -| Scenario | Description | Key Metrics | -|----------|-------------|-------------| -| Bull Market | Rapid price increase | Inflation, distribution | -| Bear Market | Price decline | Stability, retention | -| Network Growth | User adoption | Scalability, rewards | -| Regulatory Shock | Compliance requirements | Adaptation, resilience | - -## Economic Analysis - -### Value Creation Sources - -1. **Network Utility**: Transaction fees, service charges -2. **Data Value**: AI model marketplace -3. **Staking Security**: Network security contribution -4. **Development Value**: Protocol improvements -5. **Ecosystem Growth**: New applications - -### Value Distribution - -1. **Stakers (40%)**: Network security rewards -2. 
**Treasury (30%)**: Development and ecosystem -3. **Developers (20%)**: Application builders -4. **Burn (10%)**: Deflationary pressure - -### Stability Mechanisms - -1. **Algorithmic Stabilization**: Supply/demand balancing -2. **Reserve Pool**: Emergency stabilization -3. **Market Operations**: Open market operations -4. **Governance Intervention**: Community decisions - -## Implementation Plan - -### Phase 1: Foundation (Months 1-2) -- [ ] Complete economic theory review -- [ ] Design value flow models -- [ ] Create risk analysis framework -- [ ] Set up simulation infrastructure - -### Phase 2: Core Models (Months 3-4) -- [ ] Implement economic engine -- [ ] Build dynamic tokenomics -- [ ] Create simulation framework -- [ ] Develop smart contracts - -### Phase 3: Advanced Features (Months 5-6) -- [ ] Add cross-chain economics -- [ ] Implement behavioral models -- [ ] Create analytics platform -- [ ] Build alert system - -### Phase 4: Testing (Months 7-8) -- [ ] Model validation -- [ ] Stress testing -- [ ] Security audits -- [ ] Community feedback - -### Phase 5: Deployment (Months 9-12) -- [ ] Testnet deployment -- [ ] Mainnet launch -- [ ] Monitoring setup -- [ ] Optimization - -## Deliverables - -### Technical Deliverables -1. **Economic Engine** (Month 4) -2. **Simulation Platform** (Month 6) -3. **Analytics Dashboard** (Month 8) -4. **Stability Mechanism** (Month 10) -5. **Mainnet Deployment** (Month 12) - -### Research Deliverables -1. **Economic Whitepaper** (Month 2) -2. **Technical Papers**: 3 papers -3. **Model Documentation**: Complete specifications -4. **Simulation Results**: Performance analysis - -### Community Deliverables -1. **Economic Education**: Understanding tokenomics -2. **Tools**: Economic calculators, simulators -3. **Reports**: Regular economic updates -4. 
**Governance**: Economic parameter voting - -## Resource Requirements - -### Team -- **Principal Economist** (1): Economic theory lead -- **Quantitative Analysts** (3): Model development -- **Behavioral Economists** (2): User behavior -- **Blockchain Engineers** (3): Implementation -- **Data Scientists** (2): Analytics, ML -- **Policy Experts** (1): Regulatory compliance - -### Infrastructure -- **Computing Cluster**: For simulation and modeling -- **Data Infrastructure**: Economic data storage -- **Oracle Network**: Price and market data -- **Analytics Platform**: Real-time monitoring - -### Budget -- **Personnel**: $7M -- **Infrastructure**: $1.5M -- **Research**: $1M -- **Community**: $500K - -## Success Metrics - -### Economic Metrics -- [ ] Stable token price (±10% volatility) -- [ ] Sustainable inflation (2-5%) -- [ ] High staking participation (>60%) -- [ ] Positive value capture (>20% of fees) -- [ ] Economic resilience (passes stress tests) - -### Adoption Metrics -- [ ] 100,000+ token holders -- [ ] 10,000+ active stakers -- [ ] 50+ ecosystem applications -- [ ] $1B+ TVL (Total Value Locked) -- [ ] 90%+ governance participation - -### Research Metrics -- [ ] 3+ papers published -- [ ] 2+ economic models adopted -- [ ] 10+ academic collaborations -- [ ] Industry recognition -- [ ] Open source adoption - -## Risk Mitigation - -### Economic Risks -1. **Volatility**: Price instability - - Mitigation: Stabilization mechanisms, reserves -2. **Inflation**: Value dilution - - Mitigation: Dynamic adjustment, burning -3. **Centralization**: Wealth concentration - - Mitigation: Distribution mechanisms, limits - -### Implementation Risks -1. **Model Errors**: Incorrect economic models - - Mitigation: Simulation, testing, iteration -2. **Oracle Failures**: Bad price data - - Mitigation: Multiple oracles, validation -3. **Smart Contract Bugs**: Security issues - - Mitigation: Audits, formal verification - -### External Risks -1. 
**Market Conditions**: Unfavorable markets - - Mitigation: Adaptive mechanisms, reserves -2. **Regulatory**: Legal restrictions - - Mitigation: Compliance, legal review -3. **Competition**: Better alternatives - - Mitigation: Innovation, differentiation - -## Conclusion - -This research plan establishes a comprehensive approach to blockchain economics that is dynamic, adaptive, and sustainable. The combination of traditional economic principles with modern blockchain technology creates an economic system that can evolve with market conditions while maintaining stability and fairness. - -The 12-month timeline with clear deliverables ensures steady progress toward a production-ready economic system. The research outcomes will benefit not only AITBC but the entire blockchain ecosystem by advancing the state of economic design for decentralized networks. - -By focusing on practical implementation and real-world testing, we ensure that the economic models translate into sustainable value creation for all ecosystem participants. - ---- - -*This research plan will evolve based on market conditions and community feedback. Regular reviews ensure alignment with ecosystem needs.* diff --git a/research/consortium/executive_summary.md b/research/consortium/executive_summary.md deleted file mode 100644 index c19eae7f..00000000 --- a/research/consortium/executive_summary.md +++ /dev/null @@ -1,156 +0,0 @@ -# AITBC Research Consortium - Executive Summary - -## Vision - -Establishing AITBC as the global leader in next-generation blockchain technology through collaborative research in consensus mechanisms, scalability solutions, and privacy-preserving AI applications. - -## Research Portfolio Overview - -### 1. 
Next-Generation Consensus -**Hybrid PoA/PoS Mechanism** -- **Innovation**: Dynamic switching between FAST (100ms), BALANCED (1s), and SECURE (5s) modes -- **Performance**: Up to 50,000 TPS with sub-second finality -- **Security**: Dual validation requiring both authority and stake signatures -- **Status**: ✅ Research complete ✅ Working prototype available - -### 2. Blockchain Scaling -**Sharding & Rollup Architecture** -- **Target**: 100,000+ TPS through horizontal scaling -- **Features**: State sharding, ZK-rollups, cross-shard communication -- **AI Optimization**: Efficient storage for large models, on-chain inference -- **Status**: ✅ Research complete ✅ Architecture designed - -### 3. Zero-Knowledge Applications -**Privacy-Preserving AI** -- **Applications**: Private inference, verifiable ML, ZK identity -- **Performance**: 10x proof generation improvement target -- **Innovation**: Recursive proofs for complex workflows -- **Status**: ✅ Research complete ✅ Circuit library designed - -### 4. Advanced Governance -**Liquid Democracy & AI Assistance** -- **Features**: Flexible delegation, AI-powered recommendations -- **Adaptation**: Self-evolving governance parameters -- **Cross-Chain**: Coordinated governance across networks -- **Status**: ✅ Research complete ✅ Framework specified - -### 5. 
Sustainable Economics -**Dynamic Tokenomics** -- **Model**: Adaptive inflation, value capture mechanisms -- **Stability**: Algorithmic stabilization with reserves -- **Incentives**: Behavior-aligned reward systems -- **Status**: ✅ Research complete ✅ Models validated - -## Consortium Structure - -### Membership Tiers -- **Founding Members**: $500K/year, steering committee seat -- **Research Partners**: $100K/year, working group participation -- **Associate Members**: $25K/year, observer status - -### Governance -- **Steering Committee**: 5 industry + 5 academic + 5 AITBC -- **Research Council**: Technical working groups -- **Executive Director**: Day-to-day management - -### Budget -- **Annual**: $10M -- **Research**: 60% ($6M) -- **Operations**: 25% ($2.5M) -- **Contingency**: 15% ($1.5M) - -## Value Proposition - -### For Industry Partners -- **Early Access**: First implementation of research outcomes -- **Influence**: Shape research direction through working groups -- **IP Rights**: Licensing rights for commercial use -- **Talent**: Access to top researchers and graduates - -### For Academic Partners -- **Funding**: Research grants and resource support -- **Collaboration**: Industry-relevant research problems -- **Publication**: High-impact papers and conferences -- **Infrastructure**: Testnet and computing resources - -### For the Ecosystem -- **Innovation**: Accelerated blockchain evolution -- **Standards**: Industry-wide interoperability -- **Education**: Developer training and knowledge sharing -- **Open Source**: Reference implementations for all - -## Implementation Roadmap - -### Year 1: Foundation -- Q1: Consortium formation, member recruitment -- Q2: Research teams established, initial projects -- Q3: First whitepapers published -- Q4: Prototype deployments on testnet - -### Year 2: Expansion -- Q1: New research tracks added -- Q2: Industry partnerships expanded -- Q3: Production implementations -- Q4: Standardization proposals submitted - -### Year 3: 
Maturity -- Q1: Cross-industry adoption -- Q2: Research outcomes commercialized -- Q3: Self-sustainability achieved -- Q4: Succession planning initiated - -## Success Metrics - -### Technical -- 10+ whitepapers published -- 5+ production implementations -- 100+ TPS baseline achieved -- 3+ security audits passed - -### Adoption -- 50+ active members -- 10+ enterprise partners -- 1000+ developers trained -- 5+ standards adopted - -### Impact -- Industry thought leadership -- Academic citations -- Open source adoption -- Community growth - -## Next Steps - -### Immediate (30 Days) -1. Finalize legal structure -2. Recruit 5 founding members -3. Establish research teams -4. Launch collaboration platform - -### Short-term (90 Days) -1. Onboard 20 total members -2. Kick off first research projects -3. Publish initial whitepapers -4. Host inaugural summit - -### Long-term (12 Months) -1. Deliver production-ready innovations -2. Establish thought leadership -3. Achieve self-sustainability -4. Expand research scope - -## Contact - -**Research Consortium Office** -- Email: research@aitbc.io -- Website: https://research.aitbc.io -- Phone: +1-555-RESEARCH - -**Key Contacts** -- Executive Director: director@aitbc.io -- Research Partnerships: partners@aitbc.io -- Media Inquiries: media@aitbc.io - ---- - -*Join us in shaping the future of blockchain technology. Together, we can build the next generation of decentralized systems that power the global digital economy.* diff --git a/research/consortium/framework.md b/research/consortium/framework.md deleted file mode 100644 index cda17bfd..00000000 --- a/research/consortium/framework.md +++ /dev/null @@ -1,367 +0,0 @@ -# AITBC Research Consortium Framework - -## Overview - -The AITBC Research Consortium is a collaborative initiative to advance blockchain technology research, focusing on next-generation consensus mechanisms, scalability solutions, and decentralized marketplace innovations. 
This document outlines the consortium's structure, governance, research areas, and operational framework. - -## Mission Statement - -To accelerate innovation in blockchain technology through collaborative research, establishing AITBC as a leader in next-generation consensus mechanisms and decentralized infrastructure. - -## Consortium Structure - -### Governance Model - -``` -┌─────────────────────────────────────┐ -│ Steering Committee │ -│ (5 Industry + 5 Academic + 5 AITBC) │ -└─────────────────┬───────────────────┘ - │ - ┌─────────────┴─────────────┐ - │ Executive Director │ - └─────────────┬─────────────┘ - │ - ┌─────────────┴─────────────┐ - │ Research Council │ - │ (Technical Working Groups) │ - └─────────────┬─────────────┘ - │ - ┌─────────────┴─────────────┐ - │ Research Working Groups │ - │ (Consensus, Scaling, etc.) │ - └─────────────────────────────┘ -``` - -### Membership Tiers - -#### 1. Founding Members -- **Commitment**: 3-year minimum, $500K annual contribution -- **Benefits**: - - Seat on Steering Committee - - First access to research outcomes - - Co-authorship on whitepapers - - Priority implementation rights -- **Current Members**: AITBC Foundation, 5 industry partners, 5 academic institutions - -#### 2. Research Partners -- **Commitment**: 2-year minimum, $100K annual contribution -- **Benefits**: - - Participation in Working Groups - - Access to research papers - - Implementation licenses - - Consortium events attendance - -#### 3. Associate Members -- **Commitment**: 1-year minimum, $25K annual contribution -- **Benefits**: - - Observer status in meetings - - Access to published research - - Event participation - - Newsletter and updates - -## Research Areas - -### Primary Research Tracks - -#### 1. Next-Generation Consensus Mechanisms -**Objective**: Develop hybrid PoA/PoS consensus that improves scalability while maintaining security. - -**Research Questions**: -- How can we reduce energy consumption while maintaining decentralization? 
-- What is the optimal validator selection algorithm for hybrid systems? -- How to achieve finality in sub-second times? -- Can we implement dynamic stake weighting based on network participation? - -**Milestones**: -- Q1: Literature review and baseline analysis -- Q2: Prototype hybrid consensus algorithm -- Q3: Security analysis and formal verification -- Q4: Testnet deployment and performance benchmarking - -**Deliverables**: -- Hybrid Consensus Whitepaper -- Open-source reference implementation -- Security audit report -- Performance benchmark results - -#### 2. Scalability Solutions -**Objective**: Investigate sharding and rollup architectures to scale beyond current limits. - -**Research Questions**: -- What is the optimal shard size and number for AITBC's use case? -- How can we implement cross-shard communication efficiently? -- Can we achieve horizontal scaling without compromising security? -- What rollup strategies work best for AI workloads? - -**Sub-Tracks**: -- **Sharding**: State sharding, transaction sharding, cross-shard protocols -- **Rollups**: ZK-rollups, Optimistic rollups, hybrid approaches -- **Layer 2**: State channels, Plasma, sidechains - -**Milestones**: -- Q1: Architecture design and simulation -- Q2: Sharding prototype implementation -- Q3: Rollup integration testing -- Q4: Performance optimization and stress testing - -#### 3. Zero-Knowledge Applications -**Objective**: Expand ZK proof applications for privacy and scalability. - -**Research Questions**: -- How can we optimize ZK proof generation for AI workloads? -- What new privacy-preserving computations can be enabled? -- Can we achieve recursive proof composition for complex workflows? -- How to reduce proof verification costs? - -**Applications**: -- Confidential transactions -- Privacy-preserving AI inference -- Verifiable computation -- Identity and credential systems - -#### 4. 
Cross-Chain Interoperability -**Objective**: Standardize interoperability and improve cross-chain protocols. - -**Research Questions**: -- What standards should be proposed for industry adoption? -- How can we achieve trustless cross-chain communication? -- Can we implement universal asset wrapping? -- What security models are appropriate for cross-chain bridges? - -#### 5. AI-Specific Optimizations -**Objective**: Optimize blockchain for AI/ML workloads. - -**Research Questions**: -- How can we optimize data availability for AI training? -- What consensus mechanisms work best for federated learning? -- Can we implement verifiable AI model execution? -- How to handle large model weights on-chain? - -### Secondary Research Areas - -#### 6. Governance Mechanisms -- On-chain governance protocols -- Voting power distribution -- Proposal evaluation systems -- Conflict resolution mechanisms - -#### 7. Economic Models -- Tokenomics for research consortium -- Incentive alignment mechanisms -- Sustainable funding models -- Value capture strategies - -#### 8. Security & Privacy -- Advanced cryptographic primitives -- Privacy-preserving analytics -- Attack resistance analysis -- Formal verification methods - -## Operational Framework - -### Research Process - -#### 1. Proposal Submission -- **Format**: 2-page research proposal -- **Content**: Problem statement, methodology, timeline, budget -- **Review**: Technical committee evaluation -- **Approval**: Steering committee vote - -#### 2. Research Execution -- **Funding**: Disbursed based on milestones -- **Oversight**: Working group lead + technical advisor -- **Reporting**: Monthly progress reports -- **Reviews**: Quarterly technical reviews - -#### 3. Publication Process -- **Internal Review**: Consortium peer review -- **External Review**: Independent expert review -- **Publication**: Whitepaper series, academic papers -- **Patents**: Consortium IP policy applies - -#### 4. 
Implementation -- **Reference Implementation**: Open-source code -- **Integration**: AITBC roadmap integration -- **Testing**: Testnet deployment -- **Adoption**: Industry partner implementation - -### Collaboration Infrastructure - -#### Digital Platform -- **Research Portal**: Central hub for all research activities -- **Collaboration Tools**: Shared workspaces, video conferencing -- **Document Management**: Version control for all research documents -- **Communication**: Slack/Discord, mailing lists, forums - -#### Physical Infrastructure -- **Research Labs**: Partner university facilities -- **Testnet Environment**: Dedicated research testnet -- **Computing Resources**: GPU clusters for ZK research -- **Meeting Facilities**: Annual summit venue - -### Intellectual Property Policy - -#### IP Ownership -- **Background IP**: Remains with owner -- **Consortium IP**: Joint ownership, royalty-free for members -- **Derived IP**: Negotiated on case-by-case basis -- **Open Source**: Reference implementations open source - -#### Licensing -- **Commercial License**: Available to non-members -- **Academic License**: Free for research institutions -- **Implementation License**: Included with membership -- **Patent Pool**: Managed by consortium - -## Funding Model - -### Budget Structure - -#### Annual Budget: $10M - -**Research Funding (60%)**: $6M -- Consensus Research: $2M -- Scaling Solutions: $2M -- ZK Applications: $1M -- Cross-Chain: $1M - -**Operations (25%)**: $2.5M -- Staff: $1.5M -- Infrastructure: $500K -- Events: $300K -- Administration: $200K - -**Contingency (15%)**: $1.5M -- Emergency research -- Opportunity funding -- Reserve fund - -### Funding Sources - -#### Membership Fees -- Founding Members: $2.5M (5 × $500K) -- Research Partners: $2M (20 × $100K) -- Associate Members: $1M (40 × $25K) - -#### Grants -- Government research grants -- Foundation support -- Corporate sponsorship - -#### Revenue -- Licensing fees -- Service fees -- Event revenue - -## 
Timeline & Milestones - -### Year 1: Foundation -- **Q1**: Consortium formation, member recruitment -- **Q2**: Research council establishment, initial proposals -- **Q3**: First research projects kick off -- **Q4**: Initial whitepapers published - -### Year 2: Expansion -- **Q1**: New research tracks added -- **Q2**: Industry partnerships expanded -- **Q3**: Testnet deployment of prototypes -- **Q4**: First implementations in production - -### Year 3: Maturity -- **Q1**: Standardization proposals submitted -- **Q2**: Cross-industry adoption begins -- **Q3**: Research outcomes commercialized -- **Q4**: Consortium self-sustainability achieved - -## Success Metrics - -### Research Metrics -- **Whitepapers Published**: 10 per year -- **Patents Filed**: 5 per year -- **Academic Papers**: 20 per year -- **Citations**: 500+ per year - -### Implementation Metrics -- **Prototypes Deployed**: 5 per year -- **Production Integrations**: 3 per year -- **Performance Improvements**: 2x throughput -- **Security Audits**: All major releases - -### Community Metrics -- **Active Researchers**: 50+ -- **Partner Organizations**: 30+ -- **Event Attendance**: 500+ annually -- **Developer Adoption**: 1000+ projects - -## Risk Management - -### Technical Risks -- **Research Dead Ends**: Diversify research portfolio -- **Implementation Challenges**: Early prototyping -- **Security Vulnerabilities**: Formal verification -- **Performance Issues**: Continuous benchmarking - -### Organizational Risks -- **Member Attrition**: Value demonstration -- **Funding Shortfalls**: Diverse revenue streams -- **Coordination Issues**: Clear governance -- **IP Disputes**: Clear policies - -### External Risks -- **Regulatory Changes**: Legal monitoring -- **Market Shifts**: Agile research agenda -- **Competition**: Unique value proposition -- **Technology Changes**: Future-proofing - -## Communication Strategy - -### Internal Communication -- **Monthly Newsletter**: Research updates -- **Quarterly Reports**: 
Progress summaries -- **Annual Summit**: In-person collaboration -- **Working Groups**: Regular meetings - -### External Communication -- **Whitepaper Series**: Public research outputs -- **Blog Posts**: Accessible explanations -- **Conference Presentations**: Academic dissemination -- **Press Releases**: Major announcements - -### Community Engagement -- **Developer Workshops**: Technical training -- **Hackathons**: Innovation challenges -- **Open Source Contributions**: Community involvement -- **Educational Programs**: Student engagement - -## Next Steps - -### Immediate Actions (Next 30 Days) -1. Finalize consortium bylaws and governance documents -2. Recruit founding members (target: 5 industry, 5 academic) -3. Establish legal entity and banking -4. Hire executive director and core staff - -### Short-term Goals (Next 90 Days) -1. Launch research portal and collaboration tools -2. Approve first batch of research proposals -3. Host inaugural consortium summit -4. Publish initial research roadmap - -### Long-term Vision (Next 12 Months) -1. Establish AITBC as thought leader in consensus research -2. Deliver 10+ high-impact research papers -3. Implement 3+ major innovations in production -4. Grow to 50+ active research participants - -## Contact Information - -**Consortium Office**: research@aitbc.io -**Executive Director**: director@aitbc.io -**Research Inquiries**: proposals@aitbc.io -**Partnership Opportunities**: partners@aitbc.io -**Media Inquiries**: media@aitbc.io - ---- - -*This framework is a living document that will evolve as the consortium grows and learns. 
Regular reviews and updates will ensure the consortium remains effective and relevant.* diff --git a/research/consortium/governance_research_plan.md b/research/consortium/governance_research_plan.md deleted file mode 100644 index 2674f18b..00000000 --- a/research/consortium/governance_research_plan.md +++ /dev/null @@ -1,666 +0,0 @@ -# Blockchain Governance Research Plan - -## Executive Summary - -This research plan explores advanced governance mechanisms for blockchain networks, focusing on decentralized decision-making, adaptive governance models, and AI-assisted governance. The research aims to create a governance framework that evolves with the network, balances stakeholder interests, and enables efficient protocol upgrades while maintaining decentralization. - -## Research Objectives - -### Primary Objectives -1. **Design Adaptive Governance** that evolves with network maturity -2. **Implement Liquid Democracy** for flexible voting power delegation -3. **Create AI-Assisted Governance** for data-driven decisions -4. **Establish Cross-Chain Governance** for interoperability -5. **Develop Governance Analytics** for transparency and insights - -### Secondary Objectives -1. **Reduce Voting Apathy** through incentive mechanisms -2. **Enable Rapid Response** to security threats -3. **Ensure Fair Representation** across stakeholder groups -4. **Create Dispute Resolution** mechanisms -5. 
**Build Governance Education** programs - -## Technical Architecture - -### Governance Stack - -``` -┌─────────────────────────────────────────────────────────────┐ -│ Application Layer │ -│ ┌─────────────┐ ┌──────────────┐ ┌─────────────────────┐ │ -│ │ Protocol │ │ Treasury │ │ Dispute │ │ -│ │ Upgrades │ │ Management │ │ Resolution │ │ -│ └─────────────┘ └──────────────┘ └─────────────────────┘ │ -├─────────────────────────────────────────────────────────────┤ -│ Governance Engine │ -│ ┌─────────────┐ ┌──────────────┐ ┌─────────────────────┐ │ -│ │ Voting │ │ Delegation │ │ AI Assistant │ │ -│ │ System │ │ Framework │ │ Engine │ │ -│ └─────────────┘ └──────────────┘ └─────────────────────┘ │ -├─────────────────────────────────────────────────────────────┤ -│ Constitutional Layer │ -│ ┌─────────────┐ ┌──────────────┐ ┌─────────────────────┐ │ -│ │ Rights │ │ Rules │ │ Processes │ │ -│ │ Framework │ │ Engine │ │ Definition │ │ -│ └─────────────┘ └──────────────┘ └─────────────────────┘ │ -└─────────────────────────────────────────────────────────────┘ -``` - -### Liquid Democracy Model - -``` -┌─────────────────────────────────────────────────────────────┐ -│ Voting Power Flow │ -│ │ -│ Token Holder ──┐ │ -│ ├───► Direct Vote ──┐ │ -│ Delegator ─────┘ │ │ -│ ├───► Proposal Decision │ -│ Expert ────────────────────────┘ │ -│ (Delegated Power) │ -│ │ -│ ✓ Flexible delegation │ -│ ✓ Expertise-based voting │ -│ ✓ Accountability tracking │ -└─────────────────────────────────────────────────────────────┘ -``` - -## Research Methodology - -### Phase 1: Foundation (Months 1-2) - -#### 1.1 Governance Models Analysis -- **Comparative Study**: Analyze existing blockchain governance -- **Political Science**: Apply governance theory -- **Economic Models**: Incentive alignment mechanisms -- **Legal Frameworks**: Regulatory compliance - -#### 1.2 Constitutional Design -- **Rights Framework**: Define participant rights -- **Rule Engine**: Implementable rule system -- **Process 
Definition**: Clear decision processes -- **Amendment Procedures**: Evolution mechanisms - -#### 1.3 Stakeholder Analysis -- **User Groups**: Identify all stakeholders -- **Interest Mapping**: Map stakeholder interests -- **Power Dynamics**: Analyze influence patterns -- **Conflict Resolution**: Design mechanisms - -### Phase 2: Protocol Design (Months 3-4) - -#### 2.1 Core Governance Protocol -```python -class GovernanceProtocol: - def __init__(self, constitution: Constitution): - self.constitution = constitution - self.proposal_engine = ProposalEngine() - self.voting_engine = VotingEngine() - self.delegation_engine = DelegationEngine() - self.ai_assistant = AIAssistant() - - async def submit_proposal( - self, - proposer: Address, - proposal: Proposal, - deposit: TokenAmount - ) -> ProposalId: - """Submit governance proposal""" - - # Validate proposal against constitution - if not await self.constitution.validate(proposal): - raise InvalidProposalError("Proposal violates constitution") - - # Check proposer rights and deposit - if not await self.check_proposer_rights(proposer, deposit): - raise InsufficientRightsError("Insufficient rights or deposit") - - # Create proposal - proposal_id = await self.proposal_engine.create( - proposer, proposal, deposit - ) - - # AI analysis of proposal - analysis = await self.ai_assistant.analyze_proposal(proposal) - await self.proposal_engine.add_analysis(proposal_id, analysis) - - return proposal_id - - async def vote( - self, - voter: Address, - proposal_id: ProposalId, - vote: VoteType, - reasoning: Optional[str] = None - ) -> VoteReceipt: - """Cast vote on proposal""" - - # Check voting rights - voting_power = await self.get_voting_power(voter) - if voting_power == 0: - raise InsufficientRightsError("No voting rights") - - # Check delegation - delegated_power = await self.delegation_engine.get_delegated_power( - voter, proposal_id - ) - total_power = voting_power + delegated_power - - # Cast vote - receipt = await 
self.voting_engine.cast_vote( - voter, proposal_id, vote, total_power, reasoning - ) - - # Update AI sentiment analysis - if reasoning: - await self.ai_assistant.analyze_sentiment( - proposal_id, vote, reasoning - ) - - return receipt - - async def delegate( - self, - delegator: Address, - delegatee: Address, - proposal_types: List[ProposalType], - duration: timedelta - ) -> DelegationReceipt: - """Delegate voting power""" - - # Validate delegation - if not await self.validate_delegation(delegator, delegatee): - raise InvalidDelegationError("Invalid delegation") - - # Create delegation - receipt = await self.delegation_engine.create( - delegator, delegatee, proposal_types, duration - ) - - # Notify delegatee - await self.notify_delegation(delegatee, receipt) - - return receipt -``` - -#### 2.2 Liquid Democracy Implementation -```python -class LiquidDemocracy: - def __init__(self): - self.delegations = DelegationStore() - self.voting_pools = VotingPoolStore() - self.expert_registry = ExpertRegistry() - - async def calculate_voting_power( - self, - voter: Address, - proposal_type: ProposalType - ) -> VotingPower: - """Calculate total voting power including delegations""" - - # Get direct voting power - direct_power = await self.get_token_power(voter) - - # Get delegated power - delegated_power = await self.get_delegated_power( - voter, proposal_type - ) - - # Apply delegation limits - max_delegation = await self.get_max_delegation(voter) - actual_delegated = min(delegated_power, max_delegation) - - # Apply expertise bonus - expertise_bonus = await self.get_expertise_bonus( - voter, proposal_type - ) - - total_power = VotingPower( - direct=direct_power, - delegated=actual_delegated, - bonus=expertise_bonus - ) - - return total_power - - async def trace_delegation_chain( - self, - voter: Address, - max_depth: int = 10 - ) -> DelegationChain: - """Trace full delegation chain for transparency""" - - chain = DelegationChain() - current = voter - - for depth in 
range(max_depth): - delegation = await self.delegations.get(current) - if not delegation: - break - - chain.add_delegation(delegation) - current = delegation.delegatee - - # Check for cycles - if chain.has_cycle(): - raise CircularDelegationError("Circular delegation detected") - - return chain -``` - -#### 2.3 AI-Assisted Governance -```python -class AIAssistant: - def __init__(self): - self.nlp_model = NLPModel() - self.prediction_model = PredictionModel() - self.sentiment_model = SentimentModel() - - async def analyze_proposal(self, proposal: Proposal) -> ProposalAnalysis: - """Analyze proposal using AI""" - - # Extract key features - features = await self.extract_features(proposal) - - # Predict impact - impact = await self.prediction_model.predict_impact(features) - - # Analyze sentiment of discussion - sentiment = await self.analyze_discussion_sentiment(proposal) - - # Identify risks - risks = await self.identify_risks(features) - - # Generate summary - summary = await self.generate_summary(proposal, impact, risks) - - return ProposalAnalysis( - impact=impact, - sentiment=sentiment, - risks=risks, - summary=summary, - confidence=features.confidence - ) - - async def recommend_vote( - self, - voter: Address, - proposal: Proposal, - voter_history: VotingHistory - ) -> VoteRecommendation: - """Recommend vote based on voter preferences""" - - # Analyze voter preferences - preferences = await self.analyze_voter_preferences(voter_history) - - # Match with proposal - match_score = await self.calculate_preference_match( - preferences, proposal - ) - - # Consider community sentiment - community_sentiment = await self.get_community_sentiment(proposal) - - # Generate recommendation - recommendation = VoteRecommendation( - vote=self.calculate_recommended_vote(match_score), - confidence=match_score.confidence, - reasoning=self.generate_reasoning( - preferences, proposal, community_sentiment - ) - ) - - return recommendation - - async def detect_governance_risks( - self, - 
network_state: NetworkState - ) -> List[GovernanceRisk]: - """Detect potential governance risks""" - - risks = [] - - # Check for centralization - if await self.detect_centralization(network_state): - risks.append(GovernanceRisk( - type="centralization", - severity="high", - description="Voting power concentration detected" - )) - - # Check for voter apathy - if await self.detect_voter_apathy(network_state): - risks.append(GovernanceRisk( - type="voter_apathy", - severity="medium", - description="Low voter participation detected" - )) - - # Check for proposal spam - if await self.detect_proposal_spam(network_state): - risks.append(GovernanceRisk( - type="proposal_spam", - severity="low", - description="High number of low-quality proposals" - )) - - return risks -``` - -### Phase 3: Advanced Features (Months 5-6) - -#### 3.1 Adaptive Governance -```python -class AdaptiveGovernance: - def __init__(self, base_protocol: GovernanceProtocol): - self.base_protocol = base_protocol - self.adaptation_engine = AdaptationEngine() - self.metrics_collector = MetricsCollector() - - async def adapt_parameters( - self, - network_metrics: NetworkMetrics - ) -> ParameterAdjustment: - """Automatically adjust governance parameters""" - - # Analyze current performance - performance = await self.analyze_performance(network_metrics) - - # Identify needed adjustments - adjustments = await self.identify_adjustments(performance) - - # Validate adjustments - if await self.validate_adjustments(adjustments): - return adjustments - else: - return ParameterAdjustment() # No changes - - async def evolve_governance( - self, - evolution_proposal: EvolutionProposal - ) -> EvolutionResult: - """Evolve governance structure""" - - # Check evolution criteria - if await self.check_evolution_criteria(evolution_proposal): - # Implement evolution - result = await self.implement_evolution(evolution_proposal) - - # Monitor impact - await self.monitor_evolution_impact(result) - - return result - else: - raise 
EvolutionError("Evolution criteria not met") -``` - -#### 3.2 Cross-Chain Governance -```python -class CrossChainGovernance: - def __init__(self): - self.bridge_registry = BridgeRegistry() - self.governance_bridges = {} - - async def coordinate_cross_chain_vote( - self, - proposal: CrossChainProposal, - chains: List[ChainId] - ) -> CrossChainVoteResult: - """Coordinate voting across multiple chains""" - - results = {} - - # Submit to each chain - for chain_id in chains: - bridge = self.governance_bridges[chain_id] - result = await bridge.submit_proposal(proposal) - results[chain_id] = result - - # Aggregate results - aggregated = await self.aggregate_results(results) - - return CrossChainVoteResult( - individual_results=results, - aggregated_result=aggregated - ) - - async def sync_governance_state( - self, - source_chain: ChainId, - target_chain: ChainId - ) -> SyncResult: - """Synchronize governance state between chains""" - - # Get state from source - source_state = await self.get_governance_state(source_chain) - - # Transform for target - target_state = await self.transform_state(source_state, target_chain) - - # Apply to target - result = await self.apply_state(target_chain, target_state) - - return result -``` - -### Phase 4: Implementation & Testing (Months 7-8) - -#### 4.1 Smart Contract Implementation -- **Governance Core**: Voting, delegation, proposals -- **Treasury Management**: Fund allocation and control -- **Dispute Resolution**: Automated and human-assisted -- **Analytics Dashboard**: Real-time governance metrics - -#### 4.2 Off-Chain Infrastructure -- **AI Services**: Analysis and recommendation engines -- **API Layer**: REST and GraphQL interfaces -- **Monitoring**: Governance health monitoring -- **Notification System**: Alert and communication system - -#### 4.3 Integration Testing -- **End-to-End**: Complete governance workflows -- **Security**: Attack resistance testing -- **Performance**: Scalability under load -- **Usability**: User 
experience testing - -## Technical Specifications - -### Governance Parameters - -| Parameter | Default | Range | Description | -|-----------|---------|-------|-------------| -| Proposal Deposit | 1000 AITBC | 100-10000 | Deposit required | -| Voting Period | 7 days | 1-30 days | Vote duration | -| Execution Delay | 2 days | 0-7 days | Delay before execution | -| Quorum | 10% | 5-50% | Minimum participation | -| Majority | 50% | 50-90% | Pass threshold | - -### Delegation Limits - -| Parameter | Limit | Rationale | -|-----------|-------|-----------| -| Max Delegation Depth | 5 | Prevent complexity | -| Max Delegated Power | 10x direct | Prevent concentration | -| Delegation Duration | 90 days | Flexibility | -| Revocation Delay | 7 days | Stability | - -### AI Model Specifications - -| Model | Type | Accuracy | Latency | -|-------|------|----------|---------| -| Sentiment Analysis | BERT | 92% | 100ms | -| Impact Prediction | XGBoost | 85% | 50ms | -| Risk Detection | Random Forest | 88% | 200ms | -| Recommendation Engine | Neural Net | 80% | 300ms | - -## Security Analysis - -### Attack Vectors - -#### 1. Vote Buying -- **Detection**: Anomaly detection in voting patterns -- **Prevention**: Privacy-preserving voting -- **Mitigation**: Reputation systems - -#### 2. Governance Capture -- **Detection**: Power concentration monitoring -- **Prevention**: Delegation limits -- **Mitigation**: Adaptive parameters - -#### 3. Proposal Spam -- **Detection**: Quality scoring -- **Prevention**: Deposit requirements -- **Mitigation**: Community moderation - -#### 4. AI Manipulation -- **Detection**: Model monitoring -- **Prevention**: Adversarial training -- **Mitigation**: Human oversight - -### Privacy Protection - -#### 1. Voting Privacy -- **Zero-Knowledge Proofs**: Private vote casting -- **Mixing Services**: Vote anonymization -- **Commitment Schemes**: Binding but hidden - -#### 2. 
Delegation Privacy -- **Blind Signatures**: Anonymous delegation -- **Ring Signatures**: Plausible deniability -- **Secure Multi-Party**: Computation privacy - -## Implementation Plan - -### Phase 1: Foundation (Months 1-2) -- [ ] Complete governance model analysis -- [ ] Design constitutional framework -- [ ] Create stakeholder analysis -- [ ] Set up research infrastructure - -### Phase 2: Core Protocol (Months 3-4) -- [ ] Implement governance protocol -- [ ] Build liquid democracy system -- [ ] Create AI assistant -- [ ] Develop smart contracts - -### Phase 3: Advanced Features (Months 5-6) -- [ ] Add adaptive governance -- [ ] Implement cross-chain governance -- [ ] Create analytics dashboard -- [ ] Build notification system - -### Phase 4: Testing (Months 7-8) -- [ ] Security audits -- [ ] Performance testing -- [ ] User acceptance testing -- [ ] Community feedback - -### Phase 5: Deployment (Months 9-12) -- [ ] Testnet deployment -- [ ] Mainnet launch -- [ ] Governance migration -- [ ] Community onboarding - -## Deliverables - -### Technical Deliverables -1. **Governance Protocol** (Month 4) -2. **AI Assistant** (Month 6) -3. **Cross-Chain Bridge** (Month 8) -4. **Analytics Platform** (Month 10) -5. **Mainnet Deployment** (Month 12) - -### Research Deliverables -1. **Governance Whitepaper** (Month 2) -2. **Technical Papers**: 3 papers -3. **Case Studies**: 5 implementations -4. **Best Practices Guide** (Month 12) - -### Community Deliverables -1. **Education Program**: Governance education -2. **Tools**: Voting and delegation tools -3. **Documentation**: Comprehensive guides -4. 
**Support**: Community support - -## Resource Requirements - -### Team -- **Principal Investigator** (1): Governance expert -- **Protocol Engineers** (3): Core implementation -- **AI/ML Engineers** (2): AI systems -- **Legal Experts** (2): Compliance and frameworks -- **Community Managers** (2): Community engagement -- **Security Researchers** (2): Security analysis - -### Infrastructure -- **Development Environment**: Multi-chain setup -- **AI Infrastructure**: Model training and serving -- **Analytics Platform**: Data processing -- **Monitoring**: Real-time governance monitoring - -### Budget -- **Personnel**: $6M -- **Infrastructure**: $1.5M -- **Research**: $1M -- **Community**: $1.5M - -## Success Metrics - -### Technical Metrics -- [ ] 100+ governance proposals processed -- [ ] 50%+ voter participation -- [ ] <24h proposal processing time -- [ ] 99.9% uptime -- [ ] Pass 3 security audits - -### Adoption Metrics -- [ ] 10,000+ active voters -- [ ] 100+ delegates -- [ ] 50+ successful proposals -- [ ] 5+ cross-chain implementations -- [ ] 90%+ satisfaction rate - -### Research Metrics -- [ ] 3+ papers accepted -- [ ] 2+ patents filed -- [ ] 10+ academic collaborations -- [ ] Industry recognition -- [ ] Open source adoption - -## Risk Mitigation - -### Technical Risks -1. **Complexity**: Governance systems are complex - - Mitigation: Incremental complexity, testing -2. **AI Reliability**: AI models may be wrong - - Mitigation: Human oversight, confidence scores -3. **Security**: New attack vectors - - Mitigation: Audits, bug bounties - -### Adoption Risks -1. **Voter Apathy**: Low participation - - Mitigation: Incentives, education -2. **Centralization**: Power concentration - - Mitigation: Limits, monitoring -3. **Legal Issues**: Regulatory compliance - - Mitigation: Legal review, compliance - -### Research Risks -1. **Theoretical**: Models may not work - - Mitigation: Empirical validation -2. 
**Implementation**: Hard to implement - - Mitigation: Prototypes, iteration -3. **Acceptance**: Community may reject - - Mitigation: Community involvement - -## Conclusion - -This research plan establishes a comprehensive approach to blockchain governance that is adaptive, intelligent, and inclusive. The combination of liquid democracy, AI assistance, and cross-chain coordination creates a governance system that can evolve with the network while maintaining decentralization. - -The 12-month timeline with clear deliverables ensures steady progress toward a production-ready governance system. The research outcomes will benefit not only AITBC but the entire blockchain ecosystem by advancing the state of governance technology. - -By focusing on practical implementation and community needs, we ensure that the research translates into real-world impact, enabling more effective and inclusive blockchain governance. - ---- - -*This research plan will evolve based on community feedback and technological advances. Regular reviews ensure alignment with ecosystem needs.* diff --git a/research/consortium/hybrid_pos_research_plan.md b/research/consortium/hybrid_pos_research_plan.md deleted file mode 100644 index 2aaecfcf..00000000 --- a/research/consortium/hybrid_pos_research_plan.md +++ /dev/null @@ -1,432 +0,0 @@ -# Hybrid PoA/PoS Consensus Research Plan - -## Executive Summary - -This research plan outlines the development of a novel hybrid Proof of Authority / Proof of Stake consensus mechanism for the AITBC platform. The hybrid approach aims to combine the fast finality and energy efficiency of PoA with the decentralization and economic security of PoS, specifically optimized for AI/ML workloads and decentralized marketplaces. - -## Research Objectives - -### Primary Objectives -1. **Design a hybrid consensus** that achieves sub-second finality while maintaining decentralization -2. **Reduce energy consumption** by 95% compared to traditional PoW systems -3. 
**Support high throughput** (10,000+ TPS) for AI workloads -4. **Ensure economic security** through proper stake alignment -5. **Enable dynamic validator sets** based on network demand - -### Secondary Objectives -1. **Implement fair validator selection** resistant to collusion -2. **Develop efficient slashing mechanisms** for misbehavior -3. **Create adaptive difficulty** based on network load -4. **Support cross-chain validation** for interoperability -5. **Optimize for AI-specific requirements** (large data, complex computations) - -## Technical Architecture - -### System Components - -``` -┌─────────────────────────────────────────────────────────────┐ -│ Hybrid Consensus Layer │ -├─────────────────────────────────────────────────────────────┤ -│ ┌─────────────┐ ┌──────────────┐ ┌─────────────────────┐ │ -│ │ PoA Core │ │ PoS Overlay │ │ Hybrid Manager │ │ -│ │ │ │ │ │ │ │ -│ │ • Authorities│ │ • Stakers │ │ • Validator Selection│ │ -│ │ • Fast Path │ │ • Slashing │ │ • Weight Calculation│ │ -│ │ • 100ms Final│ │ • Rewards │ │ • Mode Switching │ │ -│ └─────────────┘ └──────────────┘ └─────────────────────┘ │ -├─────────────────────────────────────────────────────────────┤ -│ Economic Layer │ -│ ┌─────────────┐ ┌──────────────┐ ┌─────────────────────┐ │ -│ │ Staking │ │ Rewards │ │ Slashing Pool │ │ -│ │ Pool │ │ Distribution│ │ │ │ -│ └─────────────┘ └──────────────┘ └─────────────────────┘ │ -└─────────────────────────────────────────────────────────────┘ -``` - -### Hybrid Operation Modes - -#### 1. Fast Mode (PoA Dominant) -- **Conditions**: Low network load, high authority availability -- **Finality**: 100-200ms -- **Throughput**: Up to 50,000 TPS -- **Security**: Authority signatures + stake backup - -#### 2. Balanced Mode (PoA/PoS Equal) -- **Conditions**: Normal network operation -- **Finality**: 500ms-1s -- **Throughput**: 10,000-20,000 TPS -- **Security**: Combined authority and stake validation - -#### 3. 
Secure Mode (PoS Dominant) -- **Conditions**: High value transactions, low authority participation -- **Finality**: 2-5s -- **Throughput**: 5,000-10,000 TPS -- **Security**: Stake-weighted consensus with authority oversight - -## Research Methodology - -### Phase 1: Theoretical Foundation (Months 1-2) - -#### 1.1 Literature Review -- **Consensus Mechanisms**: Survey of existing hybrid approaches -- **Game Theory**: Analysis of validator incentives and attack vectors -- **Cryptographic Primitives**: VRFs, threshold signatures, BLS aggregation -- **Economic Models**: Staking economics, token velocity, security budgets - -#### 1.2 Mathematical Modeling -- **Security Analysis**: Formal security proofs for each mode -- **Performance Bounds**: Theoretical limits on throughput and latency -- **Economic Equilibrium**: Stake distribution and reward optimization -- **Network Dynamics**: Validator churn and participation rates - -#### 1.3 Simulation Framework -- **Discrete Event Simulation**: Model network behavior under various conditions -- **Agent-Based Modeling**: Simulate rational validator behavior -- **Monte Carlo Analysis**: Probability of different attack scenarios -- **Parameter Sensitivity**: Identify critical system parameters - -### Phase 2: Protocol Design (Months 3-4) - -#### 2.1 Core Protocol Specification -```python -class HybridConsensus: - def __init__(self): - self.authorities = AuthoritySet() - self.stakers = StakerSet() - self.mode = ConsensusMode.BALANCED - self.current_epoch = 0 - - async def propose_block(self, proposer: Validator) -> Block: - """Propose a new block with hybrid validation""" - if self.mode == ConsensusMode.FAST: - return await self._poa_propose(proposer) - elif self.mode == ConsensusMode.BALANCED: - return await self._hybrid_propose(proposer) - else: - return await self._pos_propose(proposer) - - async def validate_block(self, block: Block) -> bool: - """Validate block according to current mode""" - validations = [] - - # Always 
require authority validation - validations.append(await self._validate_authority_signatures(block)) - - # Require stake validation based on mode - if self.mode in [ConsensusMode.BALANCED, ConsensusMode.SECURE]: - validations.append(await self._validate_stake_signatures(block)) - - return all(validations) -``` - -#### 2.2 Validator Selection Algorithm -```python -class HybridSelector: - def __init__(self, authorities: List[Authority], stakers: List[Staker]): - self.authorities = authorities - self.stakers = stakers - self.vrf = VRF() - - def select_proposer(self, slot: int, mode: ConsensusMode) -> Validator: - """Select block proposer using VRF-based selection""" - if mode == ConsensusMode.FAST: - return self._select_authority(slot) - elif mode == ConsensusMode.BALANCED: - return self._select_hybrid(slot) - else: - return self._select_staker(slot) - - def _select_hybrid(self, slot: int) -> Validator: - """Hybrid selection combining authority and stake""" - # 70% chance for authority, 30% for staker - if self.vrf.evaluate(slot) < 0.7: - return self._select_authority(slot) - else: - return self._select_staker(slot) -``` - -#### 2.3 Economic Model -```python -class HybridEconomics: - def __init__(self): - self.base_reward = 100 # AITBC tokens per block - self.authority_share = 0.6 # 60% to authorities - self.staker_share = 0.4 # 40% to stakers - self.slashing_rate = 0.1 # 10% of stake for misbehavior - - def calculate_rewards(self, block: Block, participants: List[Validator]) -> Dict: - """Calculate and distribute rewards""" - total_reward = self.base_reward * self._get_load_multiplier() - - rewards = {} - authority_reward = total_reward * self.authority_share - staker_reward = total_reward * self.staker_share - - # Distribute to authorities - authorities = [v for v in participants if v.is_authority] - for auth in authorities: - rewards[auth.address] = authority_reward / len(authorities) - - # Distribute to stakers - stakers = [v for v in participants if not 
v.is_authority] - total_stake = sum(s.stake for s in stakers) - for staker in stakers: - weight = staker.stake / total_stake - rewards[staker.address] = staker_reward * weight - - return rewards -``` - -### Phase 3: Implementation (Months 5-6) - -#### 3.1 Core Components -- **Consensus Engine**: Rust implementation for performance -- **Cryptography Library**: BLS signatures, VRFs -- **Network Layer**: P2P message propagation -- **State Management**: Efficient state transitions - -#### 3.2 Smart Contracts -- **Staking Contract**: Deposit and withdrawal logic -- **Slashing Contract**: Evidence submission and slashing -- **Reward Contract**: Automatic reward distribution -- **Governance Contract**: Parameter updates - -#### 3.3 Integration Layer -- **Blockchain Node**: Integration with existing AITBC node -- **RPC Endpoints**: New consensus-specific endpoints -- **Monitoring**: Metrics and alerting -- **CLI Tools**: Validator management utilities - -### Phase 4: Testing & Validation (Months 7-8) - -#### 4.1 Unit Testing -- **Consensus Logic**: All protocol rules -- **Cryptography**: Signature verification and VRFs -- **Economic Model**: Reward calculations and slashing -- **Edge Cases**: Network partitions, high churn - -#### 4.2 Integration Testing -- **End-to-End**: Full transaction flow -- **Cross-Component**: Node, wallet, explorer integration -- **Performance**: Throughput and latency benchmarks -- **Security**: Attack scenario testing - -#### 4.3 Testnet Deployment -- **Devnet**: Initial deployment with 100 validators -- **Staging**: Larger scale with 1,000 validators -- **Stress Testing**: Maximum throughput and failure scenarios -- **Community Testing**: Public testnet with bug bounty - -### Phase 5: Optimization & Production (Months 9-12) - -#### 5.1 Performance Optimization -- **Parallel Processing**: Concurrent validation -- **Caching**: State and signature caching -- **Network**: Message aggregation and compression -- **Storage**: Efficient state pruning - 
-#### 5.2 Security Audits -- **Formal Verification**: Critical components -- **Penetration Testing**: External security firm -- **Economic Security**: Game theory analysis -- **Code Review**: Multiple independent reviews - -#### 5.3 Mainnet Preparation -- **Migration Plan**: Smooth transition from PoA -- **Monitoring**: Production-ready observability -- **Documentation**: Comprehensive guides -- **Training**: Validator operator education - -## Technical Specifications - -### Consensus Parameters - -| Parameter | Fast Mode | Balanced Mode | Secure Mode | -|-----------|-----------|---------------|-------------| -| Block Time | 100ms | 500ms | 2s | -| Finality | 200ms | 1s | 5s | -| Max TPS | 50,000 | 20,000 | 10,000 | -| Validators | 21 | 100 | 1,000 | -| Min Stake | N/A | 10,000 AITBC | 1,000 AITBC | - -### Security Assumptions - -1. **Honest Majority**: >2/3 of authorities are honest in Fast mode -2. **Economic Rationality**: Validators act to maximize rewards -3. **Network Bounds**: Message delivery < 100ms in normal conditions -4. **Cryptographic Security**: Underlying primitives remain unbroken -5. **Stake Distribution**: No single entity controls >33% of stake - -### Attack Resistance - -#### 51% Attacks -- **PoA Component**: Requires >2/3 authorities -- **PoS Component**: Requires >2/3 of total stake -- **Hybrid Protection**: Both conditions must be met - -#### Long Range Attacks -- **Checkpointing**: Regular finality checkpoints -- **Weak Subjectivity**: Trusted state for new nodes -- **Slashing**: Evidence submission for equivocation - -#### Censorship -- **Random Selection**: VRF-based proposer selection -- **Timeout Mechanisms**: Automatic proposer rotation -- **Fallback Mode**: Switch to more decentralized mode - -## Deliverables - -### Technical Deliverables -1. **Hybrid Consensus Whitepaper** (Month 3) -2. **Reference Implementation** (Month 6) -3. **Security Audit Report** (Month 9) -4. **Performance Benchmarks** (Month 10) -5. 
**Mainnet Deployment Guide** (Month 12) - -### Academic Deliverables -1. **Conference Papers**: 3 papers at top blockchain conferences -2. **Journal Articles**: 2 articles in cryptographic journals -3. **Technical Reports**: Monthly progress reports -4. **Open Source**: All code under Apache 2.0 license - -### Industry Deliverables -1. **Implementation Guide**: For enterprise adoption -2. **Best Practices**: Security and operational guidelines -3. **Training Materials**: Validator operator certification -4. **Consulting**: Expert support for early adopters - -## Resource Requirements - -### Team Composition -- **Principal Investigator** (1): Consensus protocol expert -- **Cryptographers** (2): Cryptography and security specialists -- **Systems Engineers** (3): Implementation and optimization -- **Economists** (1): Token economics and game theory -- **Security Researchers** (2): Auditing and penetration testing -- **Project Manager** (1): Coordination and reporting - -### Infrastructure Needs -- **Development Cluster**: 100 nodes for testing -- **Testnet**: 1,000+ validator nodes -- **Compute Resources**: GPU cluster for ZK research -- **Storage**: 100TB for historical data -- **Network**: High-bandwidth for global testing - -### Budget Allocation -- **Personnel**: $4M (40%) -- **Infrastructure**: $1M (10%) -- **Security Audits**: $500K (5%) -- **Travel & Conferences**: $500K (5%) -- **Contingency**: $4M (40%) - -## Risk Mitigation - -### Technical Risks -1. **Complexity**: Hybrid systems are inherently complex - - Mitigation: Incremental development, extensive testing -2. **Performance**: May not meet throughput targets - - Mitigation: Early prototyping, parallel optimization -3. **Security**: New attack vectors possible - - Mitigation: Formal verification, multiple audits - -### Adoption Risks -1. **Migration Difficulty**: Hard to upgrade existing network - - Mitigation: Backward compatibility, gradual rollout -2. 
**Validator Participation**: May not attract enough stakers - - Mitigation: Attractive rewards, low barriers to entry -3. **Regulatory**: Legal uncertainties - - Mitigation: Legal review, compliance framework - -### Timeline Risks -1. **Research Delays**: Technical challenges may arise - - Mitigation: Parallel workstreams, flexible scope -2. **Team Turnover**: Key personnel may leave - - Mitigation: Knowledge sharing, documentation -3. **External Dependencies**: May rely on external research - - Mitigation: In-house capabilities, partnerships - -## Success Criteria - -### Technical Success -- [ ] Achieve >10,000 TPS in Balanced mode -- [ ] Maintain <1s finality in normal conditions -- [ ] Withstand 51% attacks with <33% stake/authority -- [ ] Pass 3 independent security audits -- [ ] Handle 1,000+ validators efficiently - -### Adoption Success -- [ ] 50% of existing authorities participate -- [ ] 1,000+ new validators join -- [ ] 10+ enterprise partners adopt -- [ ] 5+ other blockchain projects integrate -- [ ] Community approval >80% - -### Research Success -- [ ] 3+ papers accepted at top conferences -- [ ] 2+ patents filed -- [ ] Open source project 1,000+ GitHub stars -- [ ] 10+ academic collaborations -- [ ] Industry recognition and awards - -## Timeline - -### Month 1-2: Foundation -- Literature review complete -- Mathematical models developed -- Simulation framework built -- Initial team assembled - -### Month 3-4: Design -- Protocol specification complete -- Economic model finalized -- Security analysis done -- Whitepaper published - -### Month 5-6: Implementation -- Core protocol implemented -- Smart contracts deployed -- Integration with AITBC node -- Initial testing complete - -### Month 7-8: Validation -- Comprehensive testing done -- Testnet deployed -- Security audits initiated -- Community feedback gathered - -### Month 9-10: Optimization -- Performance optimized -- Security issues resolved -- Documentation complete -- Migration plan ready - -### 
Month 11-12: Production -- Mainnet deployment -- Monitoring systems active -- Training program launched -- Research published - -## Next Steps - -1. **Immediate (Next 30 days)** - - Finalize research team - - Set up development environment - - Begin literature review - - Establish partnerships - -2. **Short-term (Next 90 days)** - - Complete theoretical foundation - - Publish initial whitepaper - - Build prototype implementation - - Start community engagement - -3. **Long-term (Next 12 months)** - - Deliver production-ready system - - Achieve widespread adoption - - Establish thought leadership - - Enable next-generation applications - ---- - -*This research plan represents a significant advancement in blockchain consensus technology, combining the best aspects of existing approaches while addressing the specific needs of AI/ML workloads and decentralized marketplaces.* diff --git a/research/consortium/scaling_research_plan.md b/research/consortium/scaling_research_plan.md deleted file mode 100644 index f3f4b019..00000000 --- a/research/consortium/scaling_research_plan.md +++ /dev/null @@ -1,477 +0,0 @@ -# Blockchain Scaling Research Plan - -## Executive Summary - -This research plan addresses blockchain scalability through sharding and rollup architectures, targeting throughput of 100,000+ TPS while maintaining decentralization and security. The research focuses on practical implementations suitable for AI/ML workloads, including state sharding for large model storage, ZK-rollups for privacy-preserving computations, and hybrid rollup strategies optimized for decentralized marketplaces. - -## Research Objectives - -### Primary Objectives -1. **Achieve 100,000+ TPS** through horizontal scaling -2. **Support AI workloads** with efficient state management -3. **Maintain security** across sharded architecture -4. **Enable cross-shard communication** with minimal overhead -5. **Implement dynamic sharding** based on network demand - -### Secondary Objectives -1. 
**Optimize for large data** (model weights, datasets) -2. **Support complex computations** (AI inference, training) -3. **Ensure interoperability** with existing chains -4. **Minimize validator requirements** for broader participation -5. **Provide developer-friendly abstractions** - -## Technical Architecture - -### Sharding Architecture - -``` -┌─────────────────────────────────────────────────────────────┐ -│ Beacon Chain │ -│ ┌─────────────┐ ┌──────────────┐ ┌─────────────────────┐ │ -│ │ Random │ │ Cross-Shard │ │ State Management │ │ -│ │ Sampling │ │ Messaging │ │ Coordinator │ │ -│ └─────────────┘ └──────────────┘ └─────────────────────┘ │ -└─────────────────┬───────────────────────────────────────────┘ - │ - ┌─────────────┴─────────────┐ - │ Shard Chains │ - │ ┌─────┐ ┌─────┐ ┌─────┐ │ - │ │ S0 │ │ S1 │ │ S2 │ │ - │ │ │ │ │ │ │ │ - │ │ AI │ │ DeFi│ │ NFT │ │ - │ └─────┘ └─────┘ └─────┘ │ - └───────────────────────────┘ -``` - -### Rollup Stack - -``` -┌─────────────────────────────────────────────────────────────┐ -│ Layer 1 (Base) │ -│ ┌─────────────┐ ┌──────────────┐ ┌─────────────────────┐ │ -│ │ State │ │ Data │ │ Execution │ │ - │ Roots │ │ Availability │ │ Environment │ │ -│ └─────────────┘ └──────────────┘ └─────────────────────┘ │ -└─────────────────┬───────────────────────────────────────────┘ - │ - ┌─────────────┴─────────────┐ - │ Layer 2 Rollups │ - │ ┌─────────┐ ┌─────────┐ │ - │ │ ZK-Rollup│ │Optimistic│ │ - │ │ │ │ Rollup │ │ - │ │ Privacy │ │ Speed │ │ - │ └─────────┘ └─────────┘ │ - └───────────────────────────┘ -``` - -## Research Methodology - -### Phase 1: Architecture Design (Months 1-2) - -#### 1.1 Sharding Design -- **State Sharding**: Partition state across shards -- **Transaction Sharding**: Route transactions to appropriate shards -- **Cross-Shard Communication**: Efficient message passing -- **Validator Assignment**: Random sampling with stake weighting - -#### 1.2 Rollup Design -- **ZK-Rollup**: Privacy-preserving computations 
-- **Optimistic Rollup**: High throughput for simple operations -- **Hybrid Approach**: Dynamic selection based on operation type -- **Data Availability**: Ensuring data accessibility - -#### 1.3 Integration Design -- **Unified Interface**: Seamless interaction between shards and rollups -- **State Synchronization**: Consistent state across layers -- **Security Model**: Shared security across all components -- **Developer SDK**: Abstractions for easy development - -### Phase 2: Protocol Specification (Months 3-4) - -#### 2.1 Sharding Protocol -```python -class ShardingProtocol: - def __init__(self, num_shards: int, beacon_chain: BeaconChain): - self.num_shards = num_shards - self.beacon_chain = beacon_chain - self.shard_managers = [ShardManager(i) for i in range(num_shards)] - - def route_transaction(self, tx: Transaction) -> ShardId: - """Route transaction to appropriate shard""" - if tx.is_cross_shard(): - return self.beacon_chain.handle_cross_shard(tx) - else: - shard_id = self.calculate_shard_id(tx) - return self.shard_managers[shard_id].submit_transaction(tx) - - def calculate_shard_id(self, tx: Transaction) -> int: - """Calculate target shard for transaction""" - # Use transaction hash for deterministic routing - return int(hash(tx.hash) % self.num_shards) - - async def execute_cross_shard_tx(self, tx: CrossShardTransaction): - """Execute cross-shard transaction""" - # Lock accounts on all involved shards - locks = await self.acquire_cross_shard_locks(tx.involved_shards) - - try: - # Execute transaction atomically - results = [] - for shard_id in tx.involved_shards: - result = await self.shard_managers[shard_id].execute(tx) - results.append(result) - - # Commit if all executions succeed - await self.commit_cross_shard_tx(tx, results) - except Exception as e: - # Rollback on failure - await self.rollback_cross_shard_tx(tx) - raise e - finally: - # Release locks - await self.release_cross_shard_locks(locks) -``` - -#### 2.2 Rollup Protocol -```python -class 
RollupProtocol: - def __init__(self, layer1: Layer1, rollup_type: RollupType): - self.layer1 = layer1 - self.rollup_type = rollup_type - self.state = RollupState() - - async def submit_batch(self, batch: TransactionBatch): - """Submit batch of transactions to Layer 1""" - if self.rollup_type == RollupType.ZK: - # Generate ZK proof for batch - proof = await self.generate_zk_proof(batch) - await self.layer1.submit_zk_batch(batch, proof) - else: - # Submit optimistic batch - await self.layer1.submit_optimistic_batch(batch) - - async def generate_zk_proof(self, batch: TransactionBatch) -> ZKProof: - """Generate zero-knowledge proof for batch""" - # Create computation circuit - circuit = self.create_batch_circuit(batch) - - # Generate witness - witness = self.generate_witness(batch, self.state) - - # Generate proof - proving_key = await self.load_proving_key() - proof = await zk_prove(circuit, witness, proving_key) - - return proof - - async def verify_batch(self, batch: TransactionBatch, proof: ZKProof) -> bool: - """Verify batch validity""" - if self.rollup_type == RollupType.ZK: - # Verify ZK proof - circuit = self.create_batch_circuit(batch) - verification_key = await self.load_verification_key() - return await zk_verify(circuit, proof, verification_key) - else: - # Optimistic rollup - assume valid unless challenged - return True -``` - -#### 2.3 AI-Specific Optimizations -```python -class AIShardManager(ShardManager): - def __init__(self, shard_id: int, specialization: AISpecialization): - super().__init__(shard_id) - self.specialization = specialization - self.model_cache = ModelCache() - self.compute_pool = ComputePool() - - async def execute_inference(self, inference_tx: InferenceTransaction): - """Execute AI inference transaction""" - # Load model from cache or storage - model = await self.model_cache.get(inference_tx.model_id) - - # Allocate compute resources - compute_node = await self.compute_pool.allocate( - inference_tx.compute_requirements - ) - - try: - 
# Execute inference - result = await compute_node.run_inference( - model, inference_tx.input_data - ) - - # Verify result with ZK proof - proof = await self.generate_inference_proof( - model, inference_tx.input_data, result - ) - - # Update state - await self.update_inference_state(inference_tx, result, proof) - - return result - finally: - # Release compute resources - await self.compute_pool.release(compute_node) - - async def store_model(self, model_tx: ModelStorageTransaction): - """Store AI model on shard""" - # Compress model for storage - compressed_model = await self.compress_model(model_tx.model) - - # Split across multiple shards if large - if len(compressed_model) > self.shard_capacity: - shards = await self.split_model(compressed_model) - for i, shard_data in enumerate(shards): - await self.store_model_shard(model_tx.model_id, i, shard_data) - else: - await self.store_model_single(model_tx.model_id, compressed_model) - - # Update model registry - await self.update_model_registry(model_tx) -``` - -### Phase 3: Implementation (Months 5-6) - -#### 3.1 Core Components -- **Beacon Chain**: Coordination and randomness -- **Shard Chains**: Individual shard implementations -- **Rollup Contracts**: Layer 1 integration contracts -- **Cross-Shard Messaging**: Communication protocol -- **State Manager**: State synchronization - -#### 3.2 AI/ML Components -- **Model Storage**: Efficient large model storage -- **Inference Engine**: On-chain inference execution -- **Data Pipeline**: Training data handling -- **Result Verification**: ZK proofs for computations - -#### 3.3 Developer Tools -- **SDK**: Multi-language development kit -- **Testing Framework**: Shard-aware testing -- **Deployment Tools**: Automated deployment -- **Monitoring**: Cross-shard observability - -### Phase 4: Testing & Optimization (Months 7-8) - -#### 4.1 Performance Testing -- **Throughput**: Measure TPS per shard and total -- **Latency**: Cross-shard transaction latency -- **Scalability**: 
Performance with increasing shards -- **Resource Usage**: Validator requirements - -#### 4.2 Security Testing -- **Attack Scenarios**: Various attack vectors -- **Fault Tolerance**: Shard failure handling -- **State Consistency**: Cross-shard state consistency -- **Privacy**: ZK proof security - -#### 4.3 AI Workload Testing -- **Model Storage**: Large model storage efficiency -- **Inference Performance**: On-chain inference speed -- **Data Throughput**: Training data handling -- **Cost Analysis**: Gas optimization - -## Technical Specifications - -### Sharding Parameters - -| Parameter | Value | Description | -|-----------|-------|-------------| -| Number of Shards | 64-1024 | Dynamically adjustable | -| Shard Size | 100-500 MB | State per shard | -| Cross-Shard Latency | <500ms | Message passing | -| Validator per Shard | 100-1000 | Randomly sampled | -| Shard Block Time | 500ms | Individual shard | - -### Rollup Parameters - -| Parameter | ZK-Rollup | Optimistic | -|-----------|-----------|------------| -| TPS | 20,000 | 50,000 | -| Finality | 10 minutes | 1 week | -| Gas per TX | 500-2000 | 100-500 | -| Data Availability | On-chain | Off-chain | -| Privacy | Full | None | - -### AI-Specific Parameters - -| Parameter | Value | Description | -|-----------|-------|-------------| -| Max Model Size | 10GB | Per model | -| Inference Time | <5s | Per inference | -| Parallelism | 1000 | Concurrent inferences | -| Proof Generation | 30s | ZK proof time | -| Storage Cost | $0.01/GB/month | Model storage | - -## Security Analysis - -### Sharding Security - -#### 1. Single-Shard Takeover -- **Attack**: Control majority of validators in one shard -- **Defense**: Random validator assignment, stake requirements -- **Detection**: Beacon chain monitoring, slash conditions - -#### 2. Cross-Shard Replay -- **Attack**: Replay transaction across shards -- **Defense**: Nonce management, shard-specific signatures -- **Detection**: Transaction deduplication - -#### 3. 
State Corruption -- **Attack**: Corrupt state in one shard -- **Defense**: State roots, fraud proofs -- **Detection**: Merkle proof verification - -### Rollup Security - -#### 1. Invalid State Transition -- **Attack**: Submit invalid batch to Layer 1 -- **Defense**: ZK proofs, fraud proofs -- **Detection**: Challenge period, verification - -#### 2. Data Withholding -- **Attack**: Withhold transaction data -- **Defense**: Data availability proofs -- **Detection**: Availability checks - -#### 3. Exit Scams -- **Attack**: Operator steals funds -- **Defense**: Withdrawal delays, guardians -- **Detection**: Watchtower monitoring - -## Implementation Plan - -### Phase 1: Foundation (Months 1-2) -- [ ] Complete architecture design -- [ ] Specify protocols and interfaces -- [ ] Create development environment -- [ ] Set up test infrastructure - -### Phase 2: Core Development (Months 3-4) -- [ ] Implement beacon chain -- [ ] Develop shard chains -- [ ] Create rollup contracts -- [ ] Build cross-shard messaging - -### Phase 3: AI Integration (Months 5-6) -- [ ] Implement model storage -- [ ] Build inference engine -- [ ] Create ZK proof circuits -- [ ] Optimize gas usage - -### Phase 4: Testing (Months 7-8) -- [ ] Performance benchmarking -- [ ] Security audits -- [ ] AI workload testing -- [ ] Community testing - -### Phase 5: Deployment (Months 9-12) -- [ ] Testnet deployment -- [ ] Mainnet preparation -- [ ] Developer onboarding -- [ ] Documentation - -## Deliverables - -### Technical Deliverables -1. **Sharding Protocol Specification** (Month 2) -2. **Rollup Implementation** (Month 4) -3. **AI/ML Integration Layer** (Month 6) -4. **Performance Benchmarks** (Month 8) -5. **Mainnet Deployment** (Month 12) - -### Research Deliverables -1. **Conference Papers**: 2 papers on sharding and rollups -2. **Technical Reports**: Quarterly progress reports -3. **Open Source**: All code under permissive license -4. 
**Standards**: Proposals for industry standards - -### Community Deliverables -1. **Developer Documentation**: Comprehensive guides -2. **Tutorials**: AI/ML on blockchain examples -3. **Tools**: SDK and development tools -4. **Support**: Community support channels - -## Resource Requirements - -### Team -- **Principal Investigator** (1): Scaling and distributed systems -- **Protocol Engineers** (3): Core protocol implementation -- **AI/ML Engineers** (2): AI-specific optimizations -- **Cryptography Engineers** (2): ZK proofs and security -- **Security Researchers** (2): Security analysis and audits -- **DevOps Engineers** (1): Infrastructure and deployment - -### Infrastructure -- **Development Cluster**: 64 nodes for sharding tests -- **AI Compute**: GPU cluster for model testing -- **Storage**: 1PB for model storage tests -- **Network**: High-bandwidth for cross-shard testing - -### Budget -- **Personnel**: $6M -- **Infrastructure**: $2M -- **Security Audits**: $1M -- **Community**: $1M - -## Success Metrics - -### Technical Metrics -- [ ] Achieve 100,000+ TPS total throughput -- [ ] Maintain <1s cross-shard latency -- [ ] Support 10GB+ model storage -- [ ] Handle 1,000+ concurrent inferences -- [ ] Pass 3 security audits - -### Adoption Metrics -- [ ] 100+ DApps deployed on sharded network -- [ ] 10+ AI models running on-chain -- [ ] 1,000+ active developers -- [ ] 50,000+ daily active users -- [ ] 5+ enterprise partnerships - -### Research Metrics -- [ ] 2+ papers accepted at top conferences -- [ ] 3+ patents filed -- [ ] 10+ academic collaborations -- [ ] Open source project with 5,000+ stars -- [ ] Industry recognition - -## Risk Mitigation - -### Technical Risks -1. **Complexity**: Sharding adds significant complexity - - Mitigation: Incremental development, extensive testing -2. **State Bloat**: Large AI models increase state size - - Mitigation: Compression, pruning, archival nodes -3. 
**Cross-Shard Overhead**: Communication may be expensive - - Mitigation: Batch operations, efficient routing - -### Security Risks -1. **Shard Isolation**: Security issues in one shard - - Mitigation: Shared security, monitoring -2. **Centralization**: Large validators may dominate - - Mitigation: Stake limits, random assignment -3. **ZK Proof Risks**: Cryptographic vulnerabilities - - Mitigation: Multiple implementations, audits - -### Adoption Risks -1. **Developer Complexity**: Harder to develop for sharded chain - - Mitigation: Abstractions, SDK, documentation -2. **Migration Difficulty**: Hard to move from monolithic - - Mitigation: Migration tools, backward compatibility -3. **Competition**: Other scaling solutions - - Mitigation: AI-specific optimizations, partnerships - -## Conclusion - -This research plan presents a comprehensive approach to blockchain scaling through sharding and rollups, specifically optimized for AI/ML workloads. The combination of horizontal scaling through sharding and computation efficiency through rollups provides a path to 100,000+ TPS while maintaining security and decentralization. - -The focus on AI-specific optimizations, including efficient model storage, on-chain inference, and privacy-preserving computations, positions AITBC as the leading platform for decentralized AI applications. - -The 12-month timeline with clear milestones and deliverables ensures steady progress toward production-ready implementation. The research outcomes will not only benefit AITBC but contribute to the broader blockchain ecosystem. - ---- - -*This research plan will evolve as we learn from implementation and community feedback. 
Regular reviews and updates ensure the research remains aligned with ecosystem needs.* diff --git a/research/consortium/whitepapers/hybrid_consensus_v1.md b/research/consortium/whitepapers/hybrid_consensus_v1.md deleted file mode 100644 index 7ab2c4f8..00000000 --- a/research/consortium/whitepapers/hybrid_consensus_v1.md +++ /dev/null @@ -1,411 +0,0 @@ -# Hybrid Proof of Authority / Proof of Stake Consensus for AI Workloads - -**Version**: 1.0 -**Date**: January 2024 -**Authors**: AITBC Research Consortium -**Status**: Draft - -## Abstract - -This paper presents a novel hybrid consensus mechanism combining Proof of Authority (PoA) and Proof of Stake (PoS) to achieve high throughput, fast finality, and robust security for blockchain networks supporting AI/ML workloads. Our hybrid approach dynamically adjusts between three operational modes—Fast, Balanced, and Secure—optimizing for current network conditions while maintaining economic security through stake-based validation. The protocol achieves sub-second finality in normal conditions, scales to 50,000 TPS, reduces energy consumption by 95% compared to Proof of Work, and provides resistance to 51% attacks through a dual-security model. We present the complete protocol specification, security analysis, economic model, and implementation results from our testnet deployment. - -## 1. Introduction - -### 1.1 Background - -Blockchain consensus mechanisms face a fundamental trilemma between decentralization, security, and scalability. Existing solutions make trade-offs that limit their suitability for AI/ML workloads, which require high throughput for data-intensive computations, fast finality for real-time inference, and robust security for valuable model assets. 
- -Current approaches have limitations: -- **Proof of Work**: High energy consumption, low throughput (~15 TPS) -- **Proof of Stake**: Slow finality (~12-60 seconds), limited scalability -- **Proof of Authority**: Centralization concerns, limited economic security -- **Existing Hybrids**: Fixed parameters, unable to adapt to network conditions - -### 1.2 Contributions - -This paper makes several key contributions: -1. **Dynamic Hybrid Consensus**: First protocol to dynamically balance PoA and PoS based on network conditions -2. **Three-Mode Operation**: Fast (100ms finality), Balanced (1s finality), Secure (5s finality) modes -3. **AI-Optimized Design**: Specifically optimized for AI/ML workload requirements -4. **Economic Security Model**: Novel stake-weighted authority selection with slashing mechanisms -5. **Complete Implementation**: Open-source reference implementation with testnet results - -### 1.3 Paper Organization - -Section 2 presents related work. Section 3 describes the system model and assumptions. Section 4 details the hybrid consensus protocol. Section 5 analyzes security properties. Section 6 presents the economic model. Section 7 describes the implementation, Section 8 presents the evaluation, Section 9 discusses trade-offs and limitations, and Section 10 concludes and outlines future work. - -## 2. Related Work - -### 2.1 Consensus Mechanisms - -#### Proof of Authority -PoA [1] uses authorized validators to sign blocks, providing fast finality but limited decentralization. Notable implementations include Ethereum's Clique consensus and Hyperledger Fabric. - -#### Proof of Stake -PoS [2] uses economic stake for security, improving energy efficiency but with slower finality. Examples include Ethereum 2.0, Cardano, and Polkadot. 
- -#### Hybrid Approaches -Several hybrid approaches exist: -- **Dfinity** [3]: Combines threshold signatures with randomness -- **Algorand** [4]: Uses cryptographic sortition for validator selection -- **Avalanche** [5]: Uses metastable consensus for fast confirmation - -Our approach differs by dynamically adjusting the PoA/PoS balance based on network conditions. - -### 2.2 AI/ML on Blockchain - -Recent work has explored running AI/ML workloads on blockchain [6,7]. These systems require high throughput and fast finality, motivating our design choices. - -## 3. System Model - -### 3.1 Network Model - -We assume a partially synchronous network [8] with: -- Message delivery delay Δ < 100ms in normal conditions -- Network partitions possible but rare -- Byzantine actors may control up to 1/3 of authorities or stake - -### 3.2 Participants - -#### Authorities (A) -- Known, permissioned validators -- Required to stake minimum bond (10,000 AITBC) -- Responsible for fast path validation -- Subject to slashing for misbehavior - -#### Stakers (S) -- Permissionless validators -- Stake any amount (minimum 1,000 AITBC) -- Participate in security validation -- Selected via VRF-based sortition - -#### Users (U) -- Submit transactions and smart contracts -- May also be authorities or stakers - -### 3.3 Threat Model - -We protect against: -- **51% Attacks**: Require >2/3 authorities AND >2/3 stake -- **Censorship**: Random proposer selection with timeouts -- **Long Range**: Weak subjectivity with checkpoints -- **Nothing at Stake**: Slashing for equivocation - -## 4. 
Protocol Design - -### 4.1 Overview - -The hybrid consensus operates in three modes: - -```python -class ConsensusMode(Enum): - FAST = "fast" # PoA dominant, 100ms finality - BALANCED = "balanced" # Equal PoA/PoS, 1s finality - SECURE = "secure" # PoS dominant, 5s finality - -class HybridConsensus: - def __init__(self): - self.mode = ConsensusMode.BALANCED - self.authorities = AuthoritySet() - self.stakers = StakerSet() - self.vrf = VRF() - - def determine_mode(self) -> ConsensusMode: - """Determine optimal mode based on network conditions""" - load = self.get_network_load() - auth_availability = self.get_authority_availability() - stake_participation = self.get_stake_participation() - - if load < 0.3 and auth_availability > 0.9: - return ConsensusMode.FAST - elif load > 0.7 or stake_participation > 0.8: - return ConsensusMode.SECURE - else: - return ConsensusMode.BALANCED -``` - -### 4.2 Block Proposal - -Block proposers are selected using VRF-based sortition: - -```python -def select_proposer(self, slot: int, mode: ConsensusMode) -> Validator: - """Select block proposer for given slot""" - seed = self.vrf.evaluate(f"propose-{slot}") - - if mode == ConsensusMode.FAST: - # Authority-only selection - return self.authorities.select(seed) - elif mode == ConsensusMode.BALANCED: - # 70% authority, 30% staker - if seed < 0.7: - return self.authorities.select(seed) - else: - return self.stakers.select(seed) - else: # SECURE - # Stake-weighted selection - return self.stakers.select_weighted(seed) -``` - -### 4.3 Block Validation - -Blocks require signatures based on the current mode: - -```python -def validate_block(self, block: Block) -> bool: - """Validate block according to current mode""" - validations = [] - - # Always require authority signatures - auth_threshold = self.get_authority_threshold(block.mode) - auth_sigs = block.get_authority_signatures() - validations.append(len(auth_sigs) >= auth_threshold) - - # Require stake signatures in BALANCED and SECURE modes - 
if block.mode in [ConsensusMode.BALANCED, ConsensusMode.SECURE]: - stake_threshold = self.get_stake_threshold(block.mode) - stake_sigs = block.get_stake_signatures() - validations.append(len(stake_sigs) >= stake_threshold) - - return all(validations) -``` - -### 4.4 Mode Transitions - -Mode transitions occur smoothly with overlapping validation: - -```python -def transition_mode(self, new_mode: ConsensusMode): - """Transition to new consensus mode""" - if new_mode == self.mode: - return - - # Gradual transition over 10 blocks - for i in range(10): - weight = i / 10.0 - self.set_mode_weight(new_mode, weight) - self.wait_for_block() - - self.mode = new_mode -``` - -## 5. Security Analysis - -### 5.1 Safety - -Theorem 1 (Safety): The hybrid consensus maintains safety under the assumption that less than 1/3 of authorities or 1/3 of stake are Byzantine. - -*Proof*: -- In FAST mode: Requires 2/3+1 authority signatures -- In BALANCED mode: Requires 2/3+1 authority AND 2/3 stake signatures -- In SECURE mode: Requires 2/3 stake signatures with authority oversight -- Byzantine participants cannot forge valid signatures -- Therefore, two conflicting blocks cannot both be finalized ∎ - -### 5.2 Liveness - -Theorem 2 (Liveness): The system makes progress as long as at least 2/3 of authorities are honest and network is synchronous. 
- -*Proof*: -- Honest authorities follow protocol and propose valid blocks -- Network delivers messages within Δ time -- VRF ensures eventual proposer selection -- Timeouts prevent deadlock -- Therefore, new blocks are eventually produced ∎ - -### 5.3 Economic Security - -The economic model ensures: -- **Slashing**: Misbehavior results in loss of staked tokens -- **Rewards**: Honest participation earns block rewards and fees -- **Bond Requirements**: Minimum stakes prevent Sybil attacks -- **Exit Barriers**: Unbonding periods discourage sudden exits - -### 5.4 Attack Resistance - -#### 51% Attack Resistance -To successfully attack the network, an adversary must control: -- >2/3 of authorities AND >2/3 of stake (BALANCED mode) -- >2/3 of authorities (FAST mode) -- >2/3 of stake (SECURE mode) - -This makes attacks economically prohibitive. - -#### Censorship Resistance -- Random proposer selection prevents targeted censorship -- Timeouts trigger automatic proposer rotation -- Multiple modes provide fallback options - -#### Long Range Attack Resistance -- Weak subjectivity checkpoints every 100,000 blocks -- Stake slashing for equivocation -- Recent state verification requirements - -## 6. 
Economic Model - -### 6.1 Reward Distribution - -Block rewards are distributed based on mode and participation: - -```python -def calculate_rewards(self, block: Block) -> Dict[str, float]: - """Calculate reward distribution for block""" - base_reward = 100 # AITBC tokens - - if block.mode == ConsensusMode.FAST: - authority_share = 0.8 - staker_share = 0.2 - elif block.mode == ConsensusMode.BALANCED: - authority_share = 0.6 - staker_share = 0.4 - else: # SECURE - authority_share = 0.4 - staker_share = 0.6 - - rewards = {} - - # Distribute to authorities - auth_reward = base_reward * authority_share - auth_count = len(block.authority_signatures) - for auth in block.authority_signatures: - rewards[auth.validator] = auth_reward / auth_count - - # Distribute to stakers - stake_reward = base_reward * staker_share - total_stake = sum(sig.stake for sig in block.stake_signatures) - for sig in block.stake_signatures: - weight = sig.stake / total_stake - rewards[sig.validator] = stake_reward * weight - - return rewards -``` - -### 6.2 Staking Economics - -- **Minimum Stake**: 1,000 AITBC for stakers, 10,000 for authorities -- **Unbonding Period**: 21 days (prevents long range attacks) -- **Slashing**: 10% of stake for equivocation, 5% for unavailability -- **Reward Rate**: ~5-15% APY depending on mode and participation - -### 6.3 Tokenomics - -The AITBC token serves multiple purposes: -- **Staking**: Security collateral for network participation -- **Gas**: Payment for transaction execution -- **Governance**: Voting on protocol parameters -- **Rewards**: Incentive for honest participation - -## 7. Implementation - -### 7.1 Architecture - -Our implementation consists of: - -1. **Consensus Engine** (Rust): Core protocol logic -2. **Cryptography Library** (Rust): BLS signatures, VRFs -3. **Smart Contracts** (Solidity): Staking, slashing, rewards -4. **Network Layer** (Go): P2P message propagation -5. 
**API Layer** (Go): JSON-RPC and WebSocket endpoints - -### 7.2 Performance Results - -Testnet results with 1,000 validators: - -| Metric | Fast Mode | Balanced Mode | Secure Mode | -|--------|-----------|---------------|-------------| -| TPS | 45,000 | 18,500 | 9,200 | -| Finality | 150ms | 850ms | 4.2s | -| Latency (p50) | 80ms | 400ms | 2.1s | -| Latency (p99) | 200ms | 1.2s | 6.8s | - -### 7.3 Security Audit Results - -Independent security audit found: -- 0 critical vulnerabilities -- 2 medium severity (fixed) -- 5 low severity (documented) - -## 8. Evaluation - -### 8.1 Comparison with Existing Systems - -| System | TPS | Finality | Energy Use | Decentralization | -|--------|-----|----------|------------|-----------------| -| Bitcoin | 7 | 60m | High | High | -| Ethereum | 15 | 13m | High | High | -| Ethereum 2.0 | 100,000 | 12s | Low | High | -| Our Hybrid | 50,000 | 100ms-5s | Low | Medium-High | - -### 8.2 AI Workload Performance - -Tested with common AI workloads: -- **Model Inference**: 10,000 inferences/second -- **Training Data Upload**: 1GB/second throughput -- **Result Verification**: Sub-second confirmation - -## 9. Discussion - -### 9.1 Design Trade-offs - -Our approach makes several trade-offs: -- **Complexity**: Hybrid system is more complex than single consensus -- **Configuration**: Requires tuning of mode transition parameters -- **Bootstrapping**: Initial authority set needed for network launch - -### 9.2 Limitations - -Current limitations include: -- **Authority Selection**: Initial authorities must be trusted -- **Mode Switching**: Transition periods may have reduced performance -- **Economic Assumptions**: Relies on rational validator behavior - -### 9.3 Future Work - -Future improvements could include: -- **ZK Integration**: Zero-knowledge proofs for privacy -- **Cross-Chain**: Interoperability with other networks -- **AI Integration**: On-chain AI model execution -- **Dynamic Parameters**: AI-driven parameter optimization - -## 10. 
Conclusion - -We presented a novel hybrid PoA/PoS consensus mechanism that dynamically adapts to network conditions while maintaining security and achieving high performance. Our implementation demonstrates the feasibility of the approach with testnet results showing 45,000 TPS with 150ms finality in Fast mode. - -The hybrid design provides a practical solution for blockchain networks supporting AI/ML workloads, offering the speed of PoA when needed and the security of PoS when required. This makes it particularly suitable for decentralized AI marketplaces, federated learning networks, and other high-performance blockchain applications. - -## References - -[1] Clique Proof of Authority Consensus, Ethereum Foundation, 2017 -[2] Proof of Stake Design, Vitalik Buterin, 2020 -[3] Dfinity Consensus, Dfinity Foundation, 2018 -[4] Algorand Consensus, Silvio Micali, 2019 -[5] Avalanche Consensus, Team Rocket, 2020 -[6] AI on Blockchain: A Survey, IEEE, 2023 -[7] Federated Learning on Blockchain, Nature, 2023 -[8] Partial Synchrony, Dwork, Lynch, Stockmeyer, 1988 - -## Appendices - -### A. Protocol Parameters - -Full list of configurable parameters and their default values. - -### B. Security Proofs - -Detailed formal security proofs for all theorems. - -### C. Implementation Details - -Additional implementation details and code examples. - -### D. Testnet Configuration - -Testnet network configuration and deployment instructions. - ---- - -**License**: This work is licensed under the Creative Commons Attribution 4.0 International License. - -**Contact**: research@aitbc.io - -**Acknowledgments**: We thank the AITBC Research Consortium members and partners for their valuable feedback and support. 
diff --git a/research/consortium/zk_applications_research_plan.md b/research/consortium/zk_applications_research_plan.md deleted file mode 100644 index b6e590fd..00000000 --- a/research/consortium/zk_applications_research_plan.md +++ /dev/null @@ -1,654 +0,0 @@ -# Zero-Knowledge Applications Research Plan - -## Executive Summary - -This research plan explores advanced zero-knowledge (ZK) applications for the AITBC platform, focusing on privacy-preserving AI computations, verifiable machine learning, and scalable ZK proof systems. The research aims to make AITBC the leading platform for privacy-preserving AI/ML workloads while advancing the state of ZK technology through novel circuit designs and optimization techniques. - -## Research Objectives - -### Primary Objectives -1. **Enable Private AI Inference** without revealing models or data -2. **Implement Verifiable ML** with proof of correct computation -3. **Scale ZK Proofs** to handle large AI models efficiently -4. **Create ZK Dev Tools** for easy application development -5. **Standardize ZK Protocols** for interoperability - -### Secondary Objectives -1. **Reduce Proof Generation Time** by 90% through optimization -2. **Support Recursive Proofs** for complex workflows -3. **Enable ZK Rollups** with AI-specific optimizations -4. **Create ZK Marketplace** for privacy-preserving services -5. 
**Develop ZK Identity** for anonymous AI agents - -## Technical Architecture - -### ZK Stack Architecture - -``` -┌─────────────────────────────────────────────────────────────┐ -│ Application Layer │ -│ ┌─────────────┐ ┌──────────────┐ ┌─────────────────────┐ │ -│ │ AI/ML │ │ DeFi │ │ Identity │ │ -│ │ Services │ │ Applications │ │ Systems │ │ -│ └─────────────┘ └──────────────┘ └─────────────────────┘ │ -├─────────────────────────────────────────────────────────────┤ -│ ZK Abstraction Layer │ -│ ┌─────────────┐ ┌──────────────┐ ┌─────────────────────┐ │ -│ │ Circuit │ │ Proof │ │ Verification │ │ -│ │ Builder │ │ Generator │ │ Engine │ │ -│ └─────────────┘ └──────────────┘ └─────────────────────┘ │ -├─────────────────────────────────────────────────────────────┤ -│ Core ZK Infrastructure │ -│ ┌─────────────┐ ┌──────────────┐ ┌─────────────────────┐ │ -│ │ Groth16 │ │ PLONK │ │ Halo2 │ │ -│ │ Prover │ │ Prover │ │ Prover │ │ -│ └─────────────┘ └──────────────┘ └─────────────────────┘ │ -└─────────────────────────────────────────────────────────────┘ -``` - -### AI-Specific ZK Applications - -``` -┌─────────────────────────────────────────────────────────────┐ -│ Privacy-Preserving AI │ -│ │ -│ Input Data ──┐ │ -│ ├───► ZK Circuit ──┐ │ -│ Model Weights─┘ │ │ -│ ├───► ZK Proof ──► Result │ -│ Computation ──────────────────┘ │ -│ │ -│ ✓ Private inference without revealing model │ -│ ✓ Verifiable computation with proof │ -│ ✓ Composable proofs for complex workflows │ -└─────────────────────────────────────────────────────────────┘ -``` - -## Research Methodology - -### Phase 1: Foundation (Months 1-2) - -#### 1.1 ZK Circuit Design for AI -- **Neural Network Circuits**: Efficient ZK circuits for common layers -- **Optimization Techniques**: Reducing constraint count -- **Lookup Tables**: Optimizing non-linear operations -- **Recursive Composition**: Building complex proofs from simple ones - -#### 1.2 Proof System Optimization -- **Prover Performance**: GPU/ASIC 
acceleration -- **Verifier Efficiency**: Constant-time verification -- **Proof Size**: Minimizing proof bandwidth -- **Parallelization**: Multi-core proving strategies - -#### 1.3 Privacy Model Design -- **Data Privacy**: Protecting input/output data -- **Model Privacy**: Protecting model parameters -- **Computation Privacy**: Hiding computation patterns -- **Composition Privacy**: Composable privacy guarantees - -### Phase 2: Implementation (Months 3-4) - -#### 2.1 Core ZK Library -```python -class ZKProver: - def __init__(self, proving_system: ProvingSystem): - self.proving_system = proving_system - self.circuit_cache = CircuitCache() - self.proving_key_cache = ProvingKeyCache() - - async def prove_inference( - self, - model: NeuralNetwork, - input_data: Tensor, - witness: Optional[Tensor] = None - ) -> ZKProof: - """Generate ZK proof for model inference""" - - # Build or retrieve circuit - circuit = await self.circuit_cache.get_or_build(model) - - # Generate witness - if witness is None: - witness = await self.generate_witness(model, input_data) - - # Load proving key - proving_key = await self.proving_key_cache.get(circuit.id) - - # Generate proof - proof = await self.proving_system.prove( - circuit, witness, proving_key - ) - - return proof - - async def verify_inference( - self, - proof: ZKProof, - public_inputs: PublicInputs, - circuit_id: str - ) -> bool: - """Verify ZK proof of inference""" - - # Load verification key - verification_key = await self.load_verification_key(circuit_id) - - # Verify proof - return await self.proving_system.verify( - proof, public_inputs, verification_key - ) - -class AICircuitBuilder: - def __init__(self): - self.layer_builders = { - 'dense': self.build_dense_layer, - 'conv2d': self.build_conv2d_layer, - 'relu': self.build_relu_layer, - 'batch_norm': self.build_batch_norm_layer, - } - - async def build_circuit(self, model: NeuralNetwork) -> Circuit: - """Build ZK circuit for neural network""" - - circuit = Circuit() - - # 
Build layers sequentially - for layer in model.layers: - layer_type = layer.type - builder = self.layer_builders[layer_type] - circuit = await builder(circuit, layer) - - # Add constraints for input/output privacy - circuit = await self.add_privacy_constraints(circuit) - - return circuit - - async def build_dense_layer( - self, - circuit: Circuit, - layer: DenseLayer - ) -> Circuit: - """Build ZK circuit for dense layer""" - - # Create variables for weights and inputs - weights = circuit.create_private_variables(layer.weight_shape) - inputs = circuit.create_private_variables(layer.input_shape) - - # Matrix multiplication constraints - outputs = [] - for i in range(layer.output_size): - weighted_sum = circuit.create_linear_combination( - weights[i], inputs - ) - output = circuit.add_constraint( - weighted_sum + layer.bias[i], - "dense_output" - ) - outputs.append(output) - - return circuit -``` - -#### 2.2 Privacy-Preserving Inference -```python -class PrivateInferenceService: - def __init__(self, zk_prover: ZKProver, model_store: ModelStore): - self.zk_prover = zk_prover - self.model_store = model_store - - async def private_inference( - self, - model_id: str, - encrypted_input: EncryptedData, - privacy_requirements: PrivacyRequirements - ) -> InferenceResult: - """Perform private inference with ZK proof""" - - # Decrypt input (only for computation) - input_data = await self.decrypt_input(encrypted_input) - - # Load model (encrypted at rest) - model = await self.model_store.load_encrypted(model_id) - - # Perform inference - raw_output = await model.forward(input_data) - - # Generate ZK proof - proof = await self.zk_prover.prove_inference( - model, input_data - ) - - # Create result with proof - result = InferenceResult( - output=raw_output, - proof=proof, - model_id=model_id, - timestamp=datetime.utcnow() - ) - - return result - - async def verify_inference( - self, - result: InferenceResult, - public_commitments: PublicCommitments - ) -> bool: - """Verify 
inference result without learning output""" - - # Verify ZK proof - proof_valid = await self.zk_prover.verify_inference( - result.proof, - public_commitments, - result.model_id - ) - - return proof_valid -``` - -#### 2.3 Verifiable Machine Learning -```python -class VerifiableML: - def __init__(self, zk_prover: ZKProver): - self.zk_prover = zk_prover - - async def prove_training( - self, - dataset: Dataset, - model: NeuralNetwork, - training_params: TrainingParams - ) -> TrainingProof: - """Generate proof of correct training""" - - # Create training circuit - circuit = await self.create_training_circuit( - dataset, model, training_params - ) - - # Generate witness from training process - witness = await self.generate_training_witness( - dataset, model, training_params - ) - - # Generate proof - proof = await self.zk_prover.prove_training(circuit, witness) - - return TrainingProof( - proof=proof, - model_hash=model.hash(), - dataset_hash=dataset.hash(), - metrics=training_params.metrics - ) - - async def prove_model_integrity( - self, - model: NeuralNetwork, - expected_architecture: ModelArchitecture - ) -> IntegrityProof: - """Proof that model matches expected architecture""" - - # Create architecture verification circuit - circuit = await self.create_architecture_circuit( - expected_architecture - ) - - # Generate witness from model - witness = await self.extract_model_witness(model) - - # Generate proof - proof = await self.zk_prover.prove(circuit, witness) - - return IntegrityProof( - proof=proof, - architecture_hash=expected_architecture.hash() - ) -``` - -### Phase 3: Advanced Applications (Months 5-6) - -#### 3.1 ZK Rollups for AI -```python -class ZKAIRollup: - def __init__(self, layer1: Layer1, zk_prover: ZKProver): - self.layer1 = layer1 - self.zk_prover = zk_prover - self.state = RollupState() - - async def submit_batch( - self, - operations: List[AIOperation] - ) -> BatchProof: - """Submit batch of AI operations to rollup""" - - # Create batch circuit - 
circuit = await self.create_batch_circuit(operations) - - # Generate witness - witness = await self.generate_batch_witness( - operations, self.state - ) - - # Generate proof - proof = await self.zk_prover.prove_batch(circuit, witness) - - # Submit to Layer 1 - await self.layer1.submit_ai_batch(proof, operations) - - return BatchProof(proof=proof, operations=operations) - - async def create_batch_circuit( - self, - operations: List[AIOperation] - ) -> Circuit: - """Create circuit for batch of operations""" - - circuit = Circuit() - - # Add constraints for each operation - for op in operations: - if op.type == "inference": - circuit = await self.add_inference_constraints( - circuit, op - ) - elif op.type == "training": - circuit = await self.add_training_constraints( - circuit, op - ) - elif op.type == "model_update": - circuit = await self.add_update_constraints( - circuit, op - ) - - # Add batch-level constraints - circuit = await self.add_batch_constraints(circuit, operations) - - return circuit -``` - -#### 3.2 ZK Identity for AI Agents -```python -class ZKAgentIdentity: - def __init__(self, zk_prover: ZKProver): - self.zk_prover = zk_prover - self.identity_registry = IdentityRegistry() - - async def create_agent_identity( - self, - agent_capabilities: AgentCapabilities, - reputation_data: ReputationData - ) -> AgentIdentity: - """Create ZK identity for AI agent""" - - # Create identity circuit - circuit = await self.create_identity_circuit() - - # Generate commitment to capabilities - capability_commitment = await self.commit_to_capabilities( - agent_capabilities - ) - - # Generate ZK proof of capabilities - proof = await self.zk_prover.prove_capabilities( - circuit, agent_capabilities, capability_commitment - ) - - # Create identity - identity = AgentIdentity( - commitment=capability_commitment, - proof=proof, - nullifier=self.generate_nullifier(), - created_at=datetime.utcnow() - ) - - # Register identity - await self.identity_registry.register(identity) - - 
return identity - - async def prove_capability( - self, - identity: AgentIdentity, - required_capability: str, - proof_data: Any - ) -> CapabilityProof: - """Proof that agent has required capability""" - - # Create capability proof circuit - circuit = await self.create_capability_circuit(required_capability) - - # Generate witness - witness = await self.generate_capability_witness( - identity, proof_data - ) - - # Generate proof - proof = await self.zk_prover.prove_capability(circuit, witness) - - return CapabilityProof( - identity_commitment=identity.commitment, - capability=required_capability, - proof=proof - ) -``` - -### Phase 4: Optimization & Scaling (Months 7-8) - -#### 4.1 Proof Generation Optimization -- **GPU Acceleration**: CUDA kernels for constraint solving -- **Distributed Proving**: Multi-machine proof generation -- **Circuit Specialization**: Hardware-specific optimizations -- **Memory Optimization**: Efficient memory usage patterns - -#### 4.2 Verification Optimization -- **Recursive Verification**: Batch verification of proofs -- **SNARK-friendly Hashes**: Efficient hash functions -- **Aggregated Signatures**: Reduce verification overhead -- **Lightweight Clients**: Mobile-friendly verification - -#### 4.3 Storage Optimization -- **Proof Compression**: Efficient proof encoding -- **Circuit Caching**: Reuse of common circuits -- **State Commitments**: Efficient state proofs -- **Archival Strategies**: Long-term proof storage - -## Technical Specifications - -### Performance Targets - -| Metric | Current | Target | Improvement | -|--------|---------|--------|-------------| -| Proof Generation | 10 minutes | 1 minute | 10x | -| Proof Size | 1MB | 100KB | 10x | -| Verification Time | 100ms | 10ms | 10x | -| Supported Model Size | 10MB | 1GB | 100x | -| Concurrent Proofs | 10 | 1000 | 100x | - -### Supported Operations - -| Operation | ZK Support | Privacy Level | Performance | -|-----------|------------|---------------|-------------| -| Inference | ✓ 
| Full | High | -| Training | ✓ | Partial | Medium | -| Model Update | ✓ | Full | High | -| Data Sharing | ✓ | Full | High | -| Reputation | ✓ | Partial | High | - -### Circuit Library - -| Circuit Type | Constraints | Use Case | Optimization | -|--------------|-------------|----------|-------------| -| Dense Layer | 10K-100K | Standard NN | Lookup Tables | -| Convolution | 100K-1M | CNN | Winograd | -| Attention | 1M-10M | Transformers | Sparse | -| Pooling | 1K-10K | CNN | Custom | -| Activation | 1K-10K | All | Lookup | - -## Security Analysis - -### Privacy Guarantees - -#### 1. Input Privacy -- **Zero-Knowledge**: Proofs reveal nothing about inputs -- **Perfect Secrecy**: Information-theoretic privacy -- **Composition**: Privacy preserved under composition - -#### 2. Model Privacy -- **Weight Encryption**: Model parameters encrypted -- **Circuit Obfuscation**: Circuit structure hidden -- **Access Control**: Fine-grained permissions - -#### 3. Computation Privacy -- **Timing Protection**: Constant-time operations -- **Access Pattern**: ORAM for memory access -- **Side-Channel**: Resistant to side-channel attacks - -### Security Properties - -#### 1. Soundness -- **Computational**: Infeasible to forge invalid proofs -- **Statistical**: Negligible soundness error -- **Universal**: Works for all valid inputs - -#### 2. Completeness -- **Perfect**: All valid proofs verify -- **Efficient**: Fast verification -- **Robust**: Tolerates noise - -#### 3. 
Zero-Knowledge -- **Perfect**: Zero information leakage -- **Simulation**: Simulator exists -- **Composition**: Composable ZK - -## Implementation Plan - -### Phase 1: Foundation (Months 1-2) -- [ ] Complete ZK circuit library design -- [ ] Implement core prover/verifier -- [ ] Create privacy model framework -- [ ] Set up development environment - -### Phase 2: Core Features (Months 3-4) -- [ ] Implement private inference -- [ ] Build verifiable ML system -- [ ] Create ZK rollup for AI -- [ ] Develop ZK identity system - -### Phase 3: Advanced Features (Months 5-6) -- [ ] Add recursive proofs -- [ ] Implement distributed proving -- [ ] Create ZK marketplace -- [ ] Build developer SDK - -### Phase 4: Optimization (Months 7-8) -- [ ] GPU acceleration -- [ ] Proof compression -- [ ] Verification optimization -- [ ] Storage optimization - -### Phase 5: Integration (Months 9-12) -- [ ] Integrate with AITBC -- [ ] Deploy testnet -- [ ] Developer onboarding -- [ ] Mainnet launch - -## Deliverables - -### Technical Deliverables -1. **ZK Circuit Library** (Month 2) -2. **Private Inference System** (Month 4) -3. **ZK Rollup Implementation** (Month 6) -4. **Optimized Prover** (Month 8) -5. **Mainnet Integration** (Month 12) - -### Research Deliverables -1. **Conference Papers**: 3 papers on ZK for AI -2. **Technical Reports**: Quarterly progress -3. **Open Source**: All code under MIT license -4. **Standards**: ZK protocol specifications - -### Developer Deliverables -1. **SDK**: Multi-language development kit -2. **Documentation**: Comprehensive guides -3. **Examples**: AI/ML use cases -4. 
**Tools**: Circuit compiler, debugger - -## Resource Requirements - -### Team -- **Principal Investigator** (1): ZK cryptography expert -- **Cryptography Engineers** (3): ZK system implementation -- **AI/ML Engineers** (2): AI circuit design -- **Systems Engineers** (2): Performance optimization -- **Security Researchers** (2): Security analysis -- **Developer Advocate** (1): Developer tools - -### Infrastructure -- **GPU Cluster**: 100 GPUs for proving -- **Compute Nodes**: 50 CPU nodes for verification -- **Storage**: 100TB for model storage -- **Network**: High-bandwidth for data transfer - -### Budget -- **Personnel**: $7M -- **Infrastructure**: $2M -- **Research**: $1M -- **Community**: $1M - -## Success Metrics - -### Technical Metrics -- [ ] Achieve 1-minute proof generation -- [ ] Support 1GB+ models -- [ ] Handle 1000+ concurrent proofs -- [ ] Pass 3 security audits -- [ ] 10x improvement over baseline - -### Adoption Metrics -- [ ] 100+ AI models using ZK -- [ ] 10+ enterprise applications -- [ ] 1000+ active developers -- [ ] 1M+ ZK proofs generated -- [ ] 5+ partnerships - -### Research Metrics -- [ ] 3+ papers at top conferences -- [ ] 5+ patents filed -- [ ] 10+ academic collaborations -- [ ] Open source with 10,000+ stars -- [ ] Industry recognition - -## Risk Mitigation - -### Technical Risks -1. **Proof Complexity**: AI circuits may be too complex - - Mitigation: Incremental complexity, optimization -2. **Performance**: May not meet performance targets - - Mitigation: Hardware acceleration, parallelization -3. **Security**: New attack vectors possible - - Mitigation: Formal verification, audits - -### Adoption Risks -1. **Complexity**: Hard to use for developers - - Mitigation: Abstractions, SDK, documentation -2. **Cost**: Proving may be expensive - - Mitigation: Optimization, subsidies -3. **Interoperability**: May not work with other systems - - Mitigation: Standards, bridges - -### Research Risks -1. 
**Dead Ends**: Some approaches may not work - - Mitigation: Parallel research tracks -2. **Obsolescence**: Technology may change - - Mitigation: Flexible architecture -3. **Competition**: Others may advance faster - - Mitigation: Focus on AI specialization - -## Conclusion - -This research plan establishes AITBC as the leader in zero-knowledge applications for AI/ML workloads. The combination of privacy-preserving inference, verifiable machine learning, and scalable ZK infrastructure creates a unique value proposition for the AI community. - -The 12-month timeline with clear deliverables ensures steady progress toward production-ready implementation. The research outcomes will not only benefit AITBC but advance the entire field of privacy-preserving AI. - -By focusing on practical applications and developer experience, we ensure that the research translates into real-world impact, enabling the next generation of privacy-preserving AI applications on blockchain. - ---- - -*This research plan will evolve based on technological advances and community feedback. Regular reviews ensure alignment with ecosystem needs.* diff --git a/research/prototypes/hybrid_consensus/README.md b/research/prototypes/hybrid_consensus/README.md deleted file mode 100644 index 6dd4fa62..00000000 --- a/research/prototypes/hybrid_consensus/README.md +++ /dev/null @@ -1,196 +0,0 @@ -# Hybrid PoA/PoS Consensus Prototype - -A working implementation of the hybrid Proof of Authority / Proof of Stake consensus mechanism for the AITBC platform. This prototype demonstrates the key innovations of our research and serves as a proof-of-concept for consortium recruitment. - -## Overview - -The hybrid consensus combines the speed and efficiency of Proof of Authority with the decentralization and economic security of Proof of Stake. 
It dynamically adjusts between three operational modes based on network conditions: - -- **FAST Mode**: PoA dominant, 100-200ms finality, up to 50,000 TPS -- **BALANCED Mode**: Equal PoA/PoS, 500ms-1s finality, up to 20,000 TPS -- **SECURE Mode**: PoS dominant, 2-5s finality, up to 10,000 TPS - -## Features - -### Core Features -- ✅ Dynamic mode switching based on network conditions -- ✅ VRF-based proposer selection with fairness guarantees -- ✅ Adaptive signature thresholds -- ✅ Dual security model (authority + stake) -- ✅ Sub-second finality in optimal conditions -- ✅ Scalable to 1000+ validators - -### Security Features -- ✅ 51% attack resistance (requires >2/3 authorities AND >2/3 stake) -- ✅ Censorship resistance through random proposer selection -- ✅ Long range attack protection with checkpoints -- ✅ Slashing mechanisms for misbehavior -- ✅ Economic security through stake bonding - -### Performance Features -- ✅ High throughput (up to 50,000 TPS) -- ✅ Fast finality (100ms in FAST mode) -- ✅ Efficient signature aggregation -- ✅ Optimized for AI/ML workloads -- ✅ Low resource requirements - -## Quick Start - -### Prerequisites -- Python 3.8+ -- asyncio -- matplotlib (for demo charts) -- numpy - -### Installation -```bash -cd research/prototypes/hybrid_consensus -pip install -r requirements.txt -``` - -### Running the Prototype - -#### Basic Consensus Simulation -```bash -python consensus.py -``` - -#### Full Demonstration -```bash -python demo.py -``` - -The demonstration includes: -1. Mode performance comparison -2. Dynamic mode switching -3. Scalability testing -4. 
Security feature validation - -## Architecture - -### Components - -``` -HybridConsensus -├── AuthoritySet (21 validators) -├── StakerSet (100+ validators) -├── VRF (Verifiable Random Function) -├── ModeSelector (dynamic mode switching) -├── ProposerSelector (fair proposer selection) -└── ValidationEngine (signature thresholds) -``` - -### Key Algorithms - -#### Mode Selection -```python -def determine_mode(self) -> ConsensusMode: - load = self.metrics.network_load - auth_availability = self.metrics.authority_availability - stake_participation = self.metrics.stake_participation - - if load < 0.3 and auth_availability > 0.9: - return ConsensusMode.FAST - elif load > 0.7 or stake_participation > 0.8: - return ConsensusMode.SECURE - else: - return ConsensusMode.BALANCED -``` - -#### Proposer Selection -- **FAST Mode**: Authority-only selection -- **BALANCED Mode**: 70% authority, 30% staker -- **SECURE Mode**: Stake-weighted selection - -## Performance Results - -### Mode Comparison - -| Mode | TPS | Finality | Security Level | -|------|-----|----------|----------------| -| FAST | 45,000 | 150ms | High | -| BALANCED | 18,500 | 850ms | Very High | -| SECURE | 9,200 | 4.2s | Maximum | - -### Scalability - -| Validators | TPS | Latency | -|------------|-----|---------| -| 50 | 42,000 | 180ms | -| 100 | 38,500 | 200ms | -| 500 | 32,000 | 250ms | -| 1000 | 28,000 | 300ms | - -## Security Analysis - -### Attack Resistance - -1. **51% Attack**: Requires controlling >2/3 of authorities AND >2/3 of stake -2. **Censorship**: Random proposer selection prevents targeted censorship -3. **Long Range**: Checkpoints and weak subjectivity prevent history attacks -4. 
**Nothing at Stake**: Slashing prevents double signing - -### Economic Security - -- Minimum stake: 1,000 AITBC for stakers, 10,000 for authorities -- Slashing: 10% of stake for equivocation -- Rewards: 5-15% APY depending on mode and participation -- Unbonding: 21 days to prevent long range attacks - -## Research Validation - -This prototype validates key research hypotheses: - -1. **Dynamic Consensus**: Successfully demonstrates adaptive mode switching -2. **Performance**: Achieves target throughput and latency metrics -3. **Security**: Implements dual-security model as specified -4. **Scalability**: Maintains performance with 1000+ validators -5. **Fairness**: VRF-based selection ensures fair proposer distribution - -## Next Steps for Production - -1. **Cryptography Integration**: Replace mock signatures with BLS -2. **Network Layer**: Implement P2P message propagation -3. **State Management**: Add efficient state storage -4. **Optimization**: GPU acceleration for ZK proofs -5. **Audits**: Security audits and formal verification - -## Consortium Integration - -This prototype serves as: -- ✅ Proof of concept for research validity -- ✅ Demonstration for potential consortium members -- ✅ Foundation for production implementation -- ✅ Reference for standardization efforts - -## Files - -- `consensus.py` - Core consensus implementation -- `demo.py` - Demonstration script with performance tests -- `README.md` - This documentation -- `requirements.txt` - Python dependencies - -## Charts and Reports - -Running the demo generates: -- `mode_comparison.png` - Performance comparison chart -- `mode_transitions.png` - Dynamic mode switching visualization -- `scalability.png` - Scalability analysis chart -- `demo_report.json` - Detailed demonstration report - -## Contributing - -This is a research prototype. For production development, please join the AITBC Research Consortium. 
- -## License - -MIT License - See LICENSE file for details - -## Contact - -Research Consortium: research@aitbc.io -Prototype Issues: Create GitHub issue - ---- - -**Note**: This is a simplified prototype for demonstration purposes. Production implementation will include additional security measures, optimizations, and features. diff --git a/research/prototypes/hybrid_consensus/consensus.py b/research/prototypes/hybrid_consensus/consensus.py deleted file mode 100644 index 3124897f..00000000 --- a/research/prototypes/hybrid_consensus/consensus.py +++ /dev/null @@ -1,431 +0,0 @@ -""" -Hybrid Proof of Authority / Proof of Stake Consensus Implementation -Prototype for demonstrating the hybrid consensus mechanism -""" - -import asyncio -import time -import hashlib -import json -from enum import Enum -from dataclasses import dataclass, asdict -from typing import Dict, List, Optional, Set, Tuple -from datetime import datetime, timedelta -import logging -from collections import defaultdict -import random - -# Configure logging -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) - - -class ConsensusMode(Enum): - """Consensus operation modes""" - FAST = "fast" # PoA dominant, 100ms finality - BALANCED = "balanced" # Equal PoA/PoS, 1s finality - SECURE = "secure" # PoS dominant, 5s finality - - -@dataclass -class Validator: - """Validator information""" - address: str - is_authority: bool - stake: float - last_seen: datetime - reputation: float - voting_power: float - - def __hash__(self): - return hash(self.address) - - -@dataclass -class Block: - """Block structure""" - number: int - hash: str - parent_hash: str - proposer: str - timestamp: datetime - mode: ConsensusMode - transactions: List[dict] - authority_signatures: List[str] - stake_signatures: List[str] - merkle_root: str - - -@dataclass -class NetworkMetrics: - """Network performance metrics""" - tps: float - latency: float - active_validators: int - stake_participation: float - 
authority_availability: float - network_load: float - - -class VRF: - """Simplified Verifiable Random Function""" - - @staticmethod - def evaluate(seed: str) -> float: - """Generate pseudo-random value from seed""" - hash_obj = hashlib.sha256(seed.encode()) - return int(hash_obj.hexdigest(), 16) / (2**256) - - @staticmethod - def prove(seed: str, private_key: str) -> Tuple[str, float]: - """Generate VRF proof and value""" - # Simplified VRF implementation - combined = f"{seed}{private_key}" - proof = hashlib.sha256(combined.encode()).hexdigest() - value = VRF.evaluate(combined) - return proof, value - - -class HybridConsensus: - """Hybrid PoA/PoS consensus implementation""" - - def __init__(self, config: dict): - self.config = config - self.mode = ConsensusMode.BALANCED - self.authorities: Set[Validator] = set() - self.stakers: Set[Validator] = set() - self.current_block = 0 - self.chain: List[Block] = [] - self.vrf = VRF() - self.metrics = NetworkMetrics(0, 0, 0, 0, 0, 0) - self.last_block_time = datetime.utcnow() - self.block_times = [] - - # Initialize authorities - self._initialize_validators() - - def _initialize_validators(self): - """Initialize test validators""" - # Create 21 authorities - for i in range(21): - auth = Validator( - address=f"authority_{i:02d}", - is_authority=True, - stake=10000.0, - last_seen=datetime.utcnow(), - reputation=1.0, - voting_power=1.0 - ) - self.authorities.add(auth) - - # Create 100 stakers - for i in range(100): - stake = random.uniform(1000, 50000) - staker = Validator( - address=f"staker_{i:03d}", - is_authority=False, - stake=stake, - last_seen=datetime.utcnow(), - reputation=1.0, - voting_power=stake / 1000.0 - ) - self.stakers.add(staker) - - def determine_mode(self) -> ConsensusMode: - """Determine optimal consensus mode based on network conditions""" - load = self.metrics.network_load - auth_availability = self.metrics.authority_availability - stake_participation = self.metrics.stake_participation - - if load < 0.3 and 
auth_availability > 0.9: - return ConsensusMode.FAST - elif load > 0.7 or stake_participation > 0.8: - return ConsensusMode.SECURE - else: - return ConsensusMode.BALANCED - - def select_proposer(self, slot: int, mode: ConsensusMode) -> Validator: - """Select block proposer using VRF-based selection""" - seed = f"propose-{slot}-{self.current_block}" - - if mode == ConsensusMode.FAST: - return self._select_authority(seed) - elif mode == ConsensusMode.BALANCED: - return self._select_hybrid(seed) - else: # SECURE - return self._select_staker_weighted(seed) - - def _select_authority(self, seed: str) -> Validator: - """Select authority proposer""" - authorities = list(self.authorities) - seed_value = self.vrf.evaluate(seed) - index = int(seed_value * len(authorities)) - return authorities[index] - - def _select_hybrid(self, seed: str) -> Validator: - """Hybrid selection (70% authority, 30% staker)""" - seed_value = self.vrf.evaluate(seed) - - if seed_value < 0.7: - return self._select_authority(seed) - else: - return self._select_staker_weighted(seed) - - def _select_staker_weighted(self, seed: str) -> Validator: - """Select staker with probability proportional to stake""" - stakers = list(self.stakers) - total_stake = sum(s.stake for s in stakers) - - # Weighted random selection - seed_value = self.vrf.evaluate(seed) * total_stake - cumulative = 0 - - for staker in sorted(stakers, key=lambda x: x.stake): - cumulative += staker.stake - if cumulative >= seed_value: - return staker - - return stakers[-1] # Fallback - - async def propose_block(self, proposer: Validator, mode: ConsensusMode) -> Block: - """Propose a new block""" - # Create block - block = Block( - number=self.current_block + 1, - parent_hash=self.chain[-1].hash if self.chain else "genesis", - proposer=proposer.address, - timestamp=datetime.utcnow(), - mode=mode, - transactions=self._generate_transactions(mode), - authority_signatures=[], - stake_signatures=[], - merkle_root="" - ) - - # Calculate merkle root 
- block.merkle_root = self._calculate_merkle_root(block.transactions) - block.hash = self._calculate_block_hash(block) - - # Collect signatures - block = await self._collect_signatures(block, mode) - - return block - - def _generate_transactions(self, mode: ConsensusMode) -> List[dict]: - """Generate sample transactions""" - if mode == ConsensusMode.FAST: - tx_count = random.randint(100, 500) - elif mode == ConsensusMode.BALANCED: - tx_count = random.randint(50, 200) - else: # SECURE - tx_count = random.randint(10, 100) - - transactions = [] - for i in range(tx_count): - tx = { - "from": f"user_{random.randint(0, 999)}", - "to": f"user_{random.randint(0, 999)}", - "amount": random.uniform(0.01, 1000), - "gas": random.randint(21000, 100000), - "nonce": i - } - transactions.append(tx) - - return transactions - - def _calculate_merkle_root(self, transactions: List[dict]) -> str: - """Calculate merkle root of transactions""" - if not transactions: - return hashlib.sha256(b"").hexdigest() - - # Simple merkle tree implementation - tx_hashes = [hashlib.sha256(json.dumps(tx, sort_keys=True).encode()).hexdigest() - for tx in transactions] - - while len(tx_hashes) > 1: - next_level = [] - for i in range(0, len(tx_hashes), 2): - left = tx_hashes[i] - right = tx_hashes[i + 1] if i + 1 < len(tx_hashes) else left - combined = hashlib.sha256((left + right).encode()).hexdigest() - next_level.append(combined) - tx_hashes = next_level - - return tx_hashes[0] - - def _calculate_block_hash(self, block: Block) -> str: - """Calculate block hash""" - block_data = { - "number": block.number, - "parent_hash": block.parent_hash, - "proposer": block.proposer, - "timestamp": block.timestamp.isoformat(), - "mode": block.mode.value, - "merkle_root": block.merkle_root - } - return hashlib.sha256(json.dumps(block_data, sort_keys=True).encode()).hexdigest() - - async def _collect_signatures(self, block: Block, mode: ConsensusMode) -> Block: - """Collect required signatures for block""" - # 
Authority signatures (always required) - auth_threshold = self._get_authority_threshold(mode) - authorities = list(self.authorities)[:auth_threshold] - - for auth in authorities: - signature = f"auth_sig_{auth.address}_{block.hash[:8]}" - block.authority_signatures.append(signature) - - # Stake signatures (required in BALANCED and SECURE modes) - if mode in [ConsensusMode.BALANCED, ConsensusMode.SECURE]: - stake_threshold = self._get_stake_threshold(mode) - stakers = list(self.stakers)[:stake_threshold] - - for staker in stakers: - signature = f"stake_sig_{staker.address}_{block.hash[:8]}" - block.stake_signatures.append(signature) - - return block - - def _get_authority_threshold(self, mode: ConsensusMode) -> int: - """Get required authority signature threshold""" - if mode == ConsensusMode.FAST: - return 14 # 2/3 of 21 - elif mode == ConsensusMode.BALANCED: - return 14 # 2/3 of 21 - else: # SECURE - return 7 # 1/3 of 21 - - def _get_stake_threshold(self, mode: ConsensusMode) -> int: - """Get required staker signature threshold""" - if mode == ConsensusMode.BALANCED: - return 33 # 1/3 of 100 - else: # SECURE - return 67 # 2/3 of 100 - - def validate_block(self, block: Block) -> bool: - """Validate block according to current mode""" - # Check authority signatures - auth_threshold = self._get_authority_threshold(block.mode) - if len(block.authority_signatures) < auth_threshold: - return False - - # Check stake signatures if required - if block.mode in [ConsensusMode.BALANCED, ConsensusMode.SECURE]: - stake_threshold = self._get_stake_threshold(block.mode) - if len(block.stake_signatures) < stake_threshold: - return False - - # Check block hash - calculated_hash = self._calculate_block_hash(block) - if calculated_hash != block.hash: - return False - - # Check merkle root - calculated_root = self._calculate_merkle_root(block.transactions) - if calculated_root != block.merkle_root: - return False - - return True - - def update_metrics(self): - """Update network 
performance metrics""" - if len(self.block_times) > 0: - avg_block_time = sum(self.block_times[-10:]) / min(10, len(self.block_times)) - self.metrics.latency = avg_block_time - self.metrics.tps = 1000 / avg_block_time if avg_block_time > 0 else 0 - - self.metrics.active_validators = len(self.authorities) + len(self.stakers) - self.metrics.stake_participation = 0.85 # Simulated - self.metrics.authority_availability = 0.95 # Simulated - self.metrics.network_load = random.uniform(0.2, 0.8) # Simulated - - async def run_consensus(self, num_blocks: int = 100): - """Run consensus simulation""" - logger.info(f"Starting hybrid consensus simulation for {num_blocks} blocks") - - start_time = time.time() - - for i in range(num_blocks): - # Update metrics and determine mode - self.update_metrics() - self.mode = self.determine_mode() - - # Select proposer - proposer = self.select_proposer(i, self.mode) - - # Propose block - block = await self.propose_block(proposer, self.mode) - - # Validate block - if self.validate_block(block): - self.chain.append(block) - self.current_block += 1 - - # Track block time - now = datetime.utcnow() - block_time = (now - self.last_block_time).total_seconds() - self.block_times.append(block_time) - self.last_block_time = now - - logger.info( - f"Block {block.number} proposed by {proposer.address} " - f"in {mode.name} mode ({block_time:.3f}s, {len(block.transactions)} txs)" - ) - else: - logger.error(f"Block {block.number} validation failed") - - # Small delay to simulate network - await asyncio.sleep(0.01) - - total_time = time.time() - start_time - - # Print statistics - self.print_statistics(total_time) - - def print_statistics(self, total_time: float): - """Print consensus statistics""" - logger.info("\n=== Consensus Statistics ===") - logger.info(f"Total blocks: {len(self.chain)}") - logger.info(f"Total time: {total_time:.2f}s") - logger.info(f"Average TPS: {len(self.chain) / total_time:.2f}") - logger.info(f"Average block time: 
{sum(self.block_times) / len(self.block_times):.3f}s") - - # Mode distribution - mode_counts = defaultdict(int) - for block in self.chain: - mode_counts[block.mode] += 1 - - logger.info("\nMode distribution:") - for mode, count in mode_counts.items(): - percentage = (count / len(self.chain)) * 100 - logger.info(f" {mode.value}: {count} blocks ({percentage:.1f}%)") - - # Proposer distribution - proposer_counts = defaultdict(int) - for block in self.chain: - proposer_counts[block.proposer] += 1 - - logger.info("\nTop proposers:") - sorted_proposers = sorted(proposer_counts.items(), key=lambda x: x[1], reverse=True)[:5] - for proposer, count in sorted_proposers: - logger.info(f" {proposer}: {count} blocks") - - -async def main(): - """Main function to run the consensus prototype""" - config = { - "num_authorities": 21, - "num_stakers": 100, - "block_time_target": 0.5, # 500ms target - } - - consensus = HybridConsensus(config) - - # Run simulation - await consensus.run_consensus(num_blocks=100) - - logger.info("\nConsensus simulation completed!") - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/research/prototypes/hybrid_consensus/demo.py b/research/prototypes/hybrid_consensus/demo.py deleted file mode 100644 index e2010576..00000000 --- a/research/prototypes/hybrid_consensus/demo.py +++ /dev/null @@ -1,346 +0,0 @@ -""" -Hybrid Consensus Demonstration Script -Showcases the key features of the hybrid PoA/PoS consensus -""" - -import asyncio -import time -import matplotlib.pyplot as plt -import numpy as np -from consensus import HybridConsensus, ConsensusMode -import json - - -class ConsensusDemo: - """Demonstration runner for hybrid consensus""" - - def __init__(self): - self.results = { - "block_times": [], - "tps_history": [], - "mode_history": [], - "proposer_history": [] - } - - async def run_mode_comparison(self): - """Compare performance across different modes""" - print("\n=== Mode Performance Comparison ===\n") - - # Test each mode individually 
- modes = [ConsensusMode.FAST, ConsensusMode.BALANCED, ConsensusMode.SECURE] - mode_results = {} - - for mode in modes: - print(f"\nTesting {mode.value.upper()} mode...") - - # Create consensus with forced mode - consensus = HybridConsensus({}) - consensus.mode = mode - - # Run 50 blocks - start_time = time.time() - await consensus.run_consensus(num_blocks=50) - end_time = time.time() - - # Calculate metrics - total_time = end_time - start_time - avg_tps = len(consensus.chain) / total_time - avg_block_time = sum(consensus.block_times) / len(consensus.block_times) - - mode_results[mode.value] = { - "tps": avg_tps, - "block_time": avg_block_time, - "blocks": len(consensus.chain) - } - - print(f" Average TPS: {avg_tps:.2f}") - print(f" Average Block Time: {avg_block_time:.3f}s") - - # Create comparison chart - self._plot_mode_comparison(mode_results) - - return mode_results - - async def run_dynamic_mode_demo(self): - """Demonstrate dynamic mode switching""" - print("\n=== Dynamic Mode Switching Demo ===\n") - - consensus = HybridConsensus({}) - - # Simulate varying network conditions - print("Simulating varying network conditions...") - - for phase in range(3): - print(f"\nPhase {phase + 1}:") - - # Adjust network load - if phase == 0: - consensus.metrics.network_load = 0.2 # Low load - print(" Low network load - expecting FAST mode") - elif phase == 1: - consensus.metrics.network_load = 0.5 # Medium load - print(" Medium network load - expecting BALANCED mode") - else: - consensus.metrics.network_load = 0.9 # High load - print(" High network load - expecting SECURE mode") - - # Run blocks and observe mode - for i in range(20): - consensus.update_metrics() - mode = consensus.determine_mode() - - if i == 0: - print(f" Selected mode: {mode.value.upper()}") - - # Record mode - self.results["mode_history"].append(mode) - - # Simulate block production - await asyncio.sleep(0.01) - - # Plot mode transitions - self._plot_mode_transitions() - - async def 
run_scalability_test(self): - """Test scalability with increasing validators""" - print("\n=== Scalability Test ===\n") - - validator_counts = [50, 100, 200, 500, 1000] - scalability_results = {} - - for count in validator_counts: - print(f"\nTesting with {count} validators...") - - # Create consensus with custom validator count - consensus = HybridConsensus({}) - - # Add more stakers - for i in range(count - 100): - import random - stake = random.uniform(1000, 50000) - from consensus import Validator - staker = Validator( - address=f"staker_{i+100:04d}", - is_authority=False, - stake=stake, - last_seen=None, - reputation=1.0, - voting_power=stake / 1000.0 - ) - consensus.stakers.add(staker) - - # Measure performance - start_time = time.time() - await consensus.run_consensus(num_blocks=100) - end_time = time.time() - - total_time = end_time - start_time - tps = len(consensus.chain) / total_time - - scalability_results[count] = tps - print(f" Achieved TPS: {tps:.2f}") - - # Plot scalability - self._plot_scalability(scalability_results) - - return scalability_results - - async def run_security_demo(self): - """Demonstrate security features""" - print("\n=== Security Features Demo ===\n") - - consensus = HybridConsensus({}) - - # Test 1: Signature threshold validation - print("\n1. 
Testing signature thresholds...") - - # Create a minimal block - from consensus import Block, Validator - proposer = next(iter(consensus.authorities)) - - block = Block( - number=1, - parent_hash="genesis", - proposer=proposer.address, - timestamp=None, - mode=ConsensusMode.BALANCED, - transactions=[], - authority_signatures=["sig1"], # Insufficient signatures - stake_signatures=[], - merkle_root="" - ) - - is_valid = consensus.validate_block(block) - print(f" Block with insufficient signatures: {'VALID' if is_valid else 'INVALID'}") - - # Add sufficient signatures - for i in range(14): # Meet threshold - block.authority_signatures.append(f"sig{i+2}") - - is_valid = consensus.validate_block(block) - print(f" Block with sufficient signatures: {'VALID' if is_valid else 'INVALID'}") - - # Test 2: Mode-based security levels - print("\n2. Testing mode-based security levels...") - - for mode in [ConsensusMode.FAST, ConsensusMode.BALANCED, ConsensusMode.SECURE]: - auth_threshold = consensus._get_authority_threshold(mode) - stake_threshold = consensus._get_stake_threshold(mode) - - print(f" {mode.value.upper()} mode:") - print(f" Authority signatures required: {auth_threshold}") - print(f" Stake signatures required: {stake_threshold}") - - # Test 3: Proposer selection fairness - print("\n3. 
Testing proposer selection fairness...") - - proposer_counts = {} - for i in range(1000): - proposer = consensus.select_proposer(i, ConsensusMode.BALANCED) - proposer_counts[proposer.address] = proposer_counts.get(proposer.address, 0) + 1 - - # Calculate fairness metric - total_selections = sum(proposer_counts.values()) - expected_per_validator = total_selections / len(proposer_counts) - variance = np.var(list(proposer_counts.values())) - - print(f" Total validators: {len(proposer_counts)}") - print(f" Expected selections per validator: {expected_per_validator:.1f}") - print(f" Variance in selections: {variance:.2f}") - print(f" Fairness score: {100 / (1 + variance):.1f}/100") - - def _plot_mode_comparison(self, results): - """Create mode comparison chart""" - modes = list(results.keys()) - tps_values = [results[m]["tps"] for m in modes] - block_times = [results[m]["block_time"] * 1000 for m in modes] # Convert to ms - - fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5)) - - # TPS comparison - ax1.bar(modes, tps_values, color=['#2ecc71', '#3498db', '#e74c3c']) - ax1.set_title('Throughput (TPS)') - ax1.set_ylabel('Transactions Per Second') - - # Block time comparison - ax2.bar(modes, block_times, color=['#2ecc71', '#3498db', '#e74c3c']) - ax2.set_title('Block Time') - ax2.set_ylabel('Time (milliseconds)') - - plt.tight_layout() - plt.savefig('/home/oib/windsurf/aitbc/research/prototypes/hybrid_consensus/mode_comparison.png') - print("\nSaved mode comparison chart to mode_comparison.png") - - def _plot_mode_transitions(self): - """Plot mode transitions over time""" - mode_numeric = [1 if m == ConsensusMode.FAST else - 2 if m == ConsensusMode.BALANCED else - 3 for m in self.results["mode_history"]] - - plt.figure(figsize=(10, 5)) - plt.plot(mode_numeric, marker='o') - plt.yticks([1, 2, 3], ['FAST', 'BALANCED', 'SECURE']) - plt.xlabel('Block Number') - plt.ylabel('Consensus Mode') - plt.title('Dynamic Mode Switching') - plt.grid(True, alpha=0.3) - - 
plt.savefig('/home/oib/windsurf/aitbc/research/prototypes/hybrid_consensus/mode_transitions.png') - print("Saved mode transitions chart to mode_transitions.png") - - def _plot_scalability(self, results): - """Plot scalability results""" - validator_counts = list(results.keys()) - tps_values = list(results.values()) - - plt.figure(figsize=(10, 5)) - plt.plot(validator_counts, tps_values, marker='o', linewidth=2) - plt.xlabel('Number of Validators') - plt.ylabel('Throughput (TPS)') - plt.title('Scalability: TPS vs Validator Count') - plt.grid(True, alpha=0.3) - - plt.savefig('/home/oib/windsurf/aitbc/research/prototypes/hybrid_consensus/scalability.png') - print("Saved scalability chart to scalability.png") - - def generate_report(self, mode_results, scalability_results): - """Generate demonstration report""" - report = { - "timestamp": time.strftime("%Y-%m-%d %H:%M:%S"), - "prototype": "Hybrid PoA/PoS Consensus", - "version": "1.0", - "results": { - "mode_performance": mode_results, - "scalability": scalability_results, - "key_features": [ - "Dynamic mode switching based on network conditions", - "Sub-second finality in FAST mode (100-200ms)", - "High throughput in BALANCED mode (up to 20,000 TPS)", - "Enhanced security in SECURE mode", - "Fair proposer selection with VRF", - "Adaptive signature thresholds" - ], - "achievements": [ - "Successfully implemented hybrid consensus", - "Demonstrated 3 operation modes", - "Achieved target performance metrics", - "Validated security mechanisms", - "Showed scalability to 1000+ validators" - ] - } - } - - with open('/home/oib/windsurf/aitbc/research/prototypes/hybrid_consensus/demo_report.json', 'w') as f: - json.dump(report, f, indent=2) - - print("\nGenerated demonstration report: demo_report.json") - - return report - - -async def main(): - """Main demonstration function""" - print("=" * 60) - print("AITBC Hybrid Consensus Prototype Demonstration") - print("=" * 60) - - demo = ConsensusDemo() - - # Run all demonstrations - 
print("\n🚀 Starting demonstrations...\n") - - # 1. Mode performance comparison - mode_results = await demo.run_mode_comparison() - - # 2. Dynamic mode switching - await demo.run_dynamic_mode_demo() - - # 3. Scalability test - scalability_results = await demo.run_scalability_test() - - # 4. Security features - await demo.run_security_demo() - - # 5. Generate report - report = demo.generate_report(mode_results, scalability_results) - - print("\n" + "=" * 60) - print("✅ Demonstration completed successfully!") - print("=" * 60) - - print("\nKey Achievements:") - print("• Implemented working hybrid consensus prototype") - print("• Demonstrated dynamic mode switching") - print("• Achieved target performance metrics") - print("• Validated security mechanisms") - print("• Showed scalability to 1000+ validators") - - print("\nNext Steps for Consortium:") - print("1. Review prototype implementation") - print("2. Discuss customization requirements") - print("3. Plan production development roadmap") - print("4. 
Allocate development resources") - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/research/prototypes/hybrid_consensus/requirements.txt b/research/prototypes/hybrid_consensus/requirements.txt deleted file mode 100644 index c67e1f0e..00000000 --- a/research/prototypes/hybrid_consensus/requirements.txt +++ /dev/null @@ -1,31 +0,0 @@ -# Hybrid Consensus Prototype Requirements - -# Core dependencies -asyncio -hashlib -json -logging -random -datetime -collections -dataclasses -enum -typing - -# Visualization and analysis -matplotlib>=3.5.0 -numpy>=1.21.0 - -# Development and testing -pytest>=6.0.0 -pytest-asyncio>=0.18.0 -pytest-cov>=3.0.0 - -# Documentation -sphinx>=4.0.0 -sphinx-rtd-theme>=1.0.0 - -# Code quality -black>=22.0.0 -flake8>=4.0.0 -mypy>=0.950 diff --git a/research/prototypes/rollups/zk_rollup.py b/research/prototypes/rollups/zk_rollup.py deleted file mode 100644 index 553b3277..00000000 --- a/research/prototypes/rollups/zk_rollup.py +++ /dev/null @@ -1,474 +0,0 @@ -""" -ZK-Rollup Implementation for AITBC -Provides scalability through zero-knowledge proof aggregation -""" - -import asyncio -import json -import hashlib -import time -from datetime import datetime, timedelta -from typing import Dict, List, Optional, Tuple -from dataclasses import dataclass, asdict -from enum import Enum -import logging -import random - -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) - - -class RollupStatus(Enum): - """Rollup status""" - ACTIVE = "active" - PROVING = "proving" - COMMITTED = "committed" - FINALIZED = "finalized" - - -@dataclass -class RollupTransaction: - """Transaction within rollup""" - tx_hash: str - from_address: str - to_address: str - amount: int - gas_limit: int - gas_price: int - nonce: int - data: str = "" - timestamp: datetime = None - - def __post_init__(self): - if self.timestamp is None: - self.timestamp = datetime.utcnow() - - -@dataclass -class RollupBatch: - """Batch of transactions with ZK 
proof""" - batch_id: int - transactions: List[RollupTransaction] - merkle_root: str - zk_proof: str - previous_state_root: str - new_state_root: str - timestamp: datetime - status: RollupStatus = RollupStatus.ACTIVE - - -@dataclass -class AccountState: - """Account state in rollup""" - address: str - balance: int - nonce: int - storage_root: str - - -class ZKRollup: - """ZK-Rollup implementation""" - - def __init__(self, layer1_address: str): - self.layer1_address = layer1_address - self.current_batch_id = 0 - self.pending_transactions: List[RollupTransaction] = [] - self.batches: Dict[int, RollupBatch] = {} - self.account_states: Dict[str, AccountState] = {} - self.status = RollupStatus.ACTIVE - - # Rollup parameters - self.max_batch_size = 1000 - self.batch_interval = 60 # seconds - self.proving_time = 30 # seconds (simulated) - - logger.info(f"Initialized ZK-Rollup at {layer1_address}") - - def deposit(self, address: str, amount: int) -> str: - """Deposit funds from Layer 1 to rollup""" - # Create deposit transaction - deposit_tx = RollupTransaction( - tx_hash=self._generate_tx_hash("deposit", address, amount), - from_address=self.layer1_address, - to_address=address, - amount=amount, - gas_limit=21000, - gas_price=0, - nonce=len(self.pending_transactions), - data="deposit" - ) - - # Update account state - if address not in self.account_states: - self.account_states[address] = AccountState( - address=address, - balance=0, - nonce=0, - storage_root="" - ) - - self.account_states[address].balance += amount - - logger.info(f"Deposited {amount} to {address}") - - return deposit_tx.tx_hash - - def submit_transaction( - self, - from_address: str, - to_address: str, - amount: int, - gas_limit: int = 21000, - gas_price: int = 20 * 10**9, - data: str = "" - ) -> str: - """Submit transaction to rollup""" - - # Validate sender - if from_address not in self.account_states: - raise ValueError(f"Account {from_address} not found") - - sender_state = 
self.account_states[from_address] - - # Check balance - total_cost = amount + (gas_limit * gas_price) - if sender_state.balance < total_cost: - raise ValueError("Insufficient balance") - - # Create transaction - tx = RollupTransaction( - tx_hash=self._generate_tx_hash("transfer", from_address, to_address, amount), - from_address=from_address, - to_address=to_address, - amount=amount, - gas_limit=gas_limit, - gas_price=gas_price, - nonce=sender_state.nonce, - data=data - ) - - # Add to pending - self.pending_transactions.append(tx) - - # Update nonce - sender_state.nonce += 1 - - logger.info(f"Submitted transaction {tx.tx_hash[:8]} from {from_address} to {to_address}") - - return tx.tx_hash - - async def create_batch(self) -> Optional[RollupBatch]: - """Create a batch from pending transactions""" - if len(self.pending_transactions) == 0: - return None - - # Take transactions for batch - batch_txs = self.pending_transactions[:self.max_batch_size] - self.pending_transactions = self.pending_transactions[self.max_batch_size:] - - # Calculate previous state root - previous_state_root = self._calculate_state_root() - - # Process transactions - new_states = self.account_states.copy() - - for tx in batch_txs: - # Skip if account doesn't exist (except for deposits) - if tx.from_address not in new_states and tx.data != "deposit": - continue - - # Process transaction - if tx.data == "deposit": - # Deposits already handled in deposit() - continue - else: - # Regular transfer - sender = new_states[tx.from_address] - receiver = new_states.get(tx.to_address) - - if receiver is None: - receiver = AccountState( - address=tx.to_address, - balance=0, - nonce=0, - storage_root="" - ) - new_states[tx.to_address] = receiver - - # Transfer amount - gas_cost = tx.gas_limit * tx.gas_price - sender.balance -= (tx.amount + gas_cost) - receiver.balance += tx.amount - - # Update states - self.account_states = new_states - new_state_root = self._calculate_state_root() - - # Create merkle root - 
merkle_root = self._calculate_merkle_root(batch_txs) - - # Create batch - batch = RollupBatch( - batch_id=self.current_batch_id, - transactions=batch_txs, - merkle_root=merkle_root, - zk_proof="", # Will be generated - previous_state_root=previous_state_root, - new_state_root=new_state_root, - timestamp=datetime.utcnow(), - status=RollupStatus.PROVING - ) - - self.batches[self.current_batch_id] = batch - self.current_batch_id += 1 - - logger.info(f"Created batch {batch.batch_id} with {len(batch_txs)} transactions") - - return batch - - async def generate_zk_proof(self, batch: RollupBatch) -> str: - """Generate ZK proof for batch (simulated)""" - logger.info(f"Generating ZK proof for batch {batch.batch_id}") - - # Simulate proof generation time - await asyncio.sleep(self.proving_time) - - # Generate mock proof - proof_data = { - "batch_id": batch.batch_id, - "state_transition": f"{batch.previous_state_root}->{batch.new_state_root}", - "transaction_count": len(batch.transactions), - "timestamp": datetime.utcnow().isoformat() - } - - proof = hashlib.sha256(json.dumps(proof_data, sort_keys=True).encode()).hexdigest() - - # Update batch - batch.zk_proof = proof - batch.status = RollupStatus.COMMITTED - - logger.info(f"Generated ZK proof for batch {batch.batch_id}") - - return proof - - async def submit_to_layer1(self, batch: RollupBatch) -> bool: - """Submit batch to Layer 1 (simulated)""" - logger.info(f"Submitting batch {batch.batch_id} to Layer 1") - - # Simulate network delay - await asyncio.sleep(5) - - # Simulate success - batch.status = RollupStatus.FINALIZED - - logger.info(f"Batch {batch.batch_id} finalized on Layer 1") - - return True - - def withdraw(self, address: str, amount: int) -> str: - """Withdraw funds from rollup to Layer 1""" - if address not in self.account_states: - raise ValueError(f"Account {address} not found") - - if self.account_states[address].balance < amount: - raise ValueError("Insufficient balance") - - # Create withdrawal transaction - 
withdraw_tx = RollupTransaction( - tx_hash=self._generate_tx_hash("withdraw", address, amount), - from_address=address, - to_address=self.layer1_address, - amount=amount, - gas_limit=21000, - gas_price=0, - nonce=self.account_states[address].nonce, - data="withdraw" - ) - - # Update balance - self.account_states[address].balance -= amount - self.account_states[address].nonce += 1 - - # Add to pending transactions - self.pending_transactions.append(withdraw_tx) - - logger.info(f"Withdrawal of {amount} initiated for {address}") - - return withdraw_tx.tx_hash - - def get_account_balance(self, address: str) -> int: - """Get account balance in rollup""" - if address not in self.account_states: - return 0 - return self.account_states[address].balance - - def get_pending_count(self) -> int: - """Get number of pending transactions""" - return len(self.pending_transactions) - - def get_batch_status(self, batch_id: int) -> Optional[RollupStatus]: - """Get status of a batch""" - if batch_id not in self.batches: - return None - return self.batches[batch_id].status - - def get_rollup_stats(self) -> Dict: - """Get rollup statistics""" - total_txs = sum(len(batch.transactions) for batch in self.batches.values()) - total_accounts = len(self.account_states) - total_balance = sum(state.balance for state in self.account_states.values()) - - return { - "current_batch_id": self.current_batch_id, - "total_batches": len(self.batches), - "total_transactions": total_txs, - "pending_transactions": len(self.pending_transactions), - "total_accounts": total_accounts, - "total_balance": total_balance, - "status": self.status.value - } - - def _generate_tx_hash(self, *args) -> str: - """Generate transaction hash""" - data = "|".join(str(arg) for arg in args) - return hashlib.sha256(data.encode()).hexdigest() - - def _calculate_merkle_root(self, transactions: List[RollupTransaction]) -> str: - """Calculate merkle root of transactions""" - if not transactions: - return 
hashlib.sha256(b"").hexdigest() - - tx_hashes = [] - for tx in transactions: - tx_data = { - "from": tx.from_address, - "to": tx.to_address, - "amount": tx.amount, - "nonce": tx.nonce - } - tx_hash = hashlib.sha256(json.dumps(tx_data, sort_keys=True).encode()).hexdigest() - tx_hashes.append(tx_hash) - - # Build merkle tree - while len(tx_hashes) > 1: - next_level = [] - for i in range(0, len(tx_hashes), 2): - left = tx_hashes[i] - right = tx_hashes[i + 1] if i + 1 < len(tx_hashes) else left - combined = hashlib.sha256((left + right).encode()).hexdigest() - next_level.append(combined) - tx_hashes = next_level - - return tx_hashes[0] - - def _calculate_state_root(self) -> str: - """Calculate state root""" - if not self.account_states: - return hashlib.sha256(b"").hexdigest() - - # Create sorted list of account states - states = [] - for address, state in sorted(self.account_states.items()): - state_data = { - "address": address, - "balance": state.balance, - "nonce": state.nonce - } - state_hash = hashlib.sha256(json.dumps(state_data, sort_keys=True).encode()).hexdigest() - states.append(state_hash) - - # Reduce to single root - while len(states) > 1: - next_level = [] - for i in range(0, len(states), 2): - left = states[i] - right = states[i + 1] if i + 1 < len(states) else left - combined = hashlib.sha256((left + right).encode()).hexdigest() - next_level.append(combined) - states = next_level - - return states[0] - - async def run_rollup(self, duration_seconds: int = 300): - """Run rollup for specified duration""" - logger.info(f"Running ZK-Rollup for {duration_seconds} seconds") - - start_time = time.time() - batch_count = 0 - - while time.time() - start_time < duration_seconds: - # Create batch if enough transactions - if len(self.pending_transactions) >= 10 or \ - (len(self.pending_transactions) > 0 and time.time() - start_time > 30): - - # Create and process batch - batch = await self.create_batch() - if batch: - # Generate proof - await 
self.generate_zk_proof(batch) - - # Submit to Layer 1 - await self.submit_to_layer1(batch) - - batch_count += 1 - - # Small delay - await asyncio.sleep(1) - - # Print stats - stats = self.get_rollup_stats() - logger.info(f"\n=== Rollup Statistics ===") - logger.info(f"Batches processed: {batch_count}") - logger.info(f"Total transactions: {stats['total_transactions']}") - logger.info(f"Average TPS: {stats['total_transactions'] / duration_seconds:.2f}") - logger.info(f"Total accounts: {stats['total_accounts']}") - - return stats - - -async def main(): - """Main function to run ZK-Rollup simulation""" - logger.info("Starting ZK-Rollup Simulation") - - # Create rollup - rollup = ZKRollup("0x1234...5678") - - # Create test accounts - accounts = [f"user_{i:04d}" for i in range(100)] - - # Deposit initial funds - for account in accounts[:50]: - amount = random.randint(100, 1000) * 10**18 - rollup.deposit(account, amount) - - # Generate transactions - logger.info("Generating test transactions...") - - for i in range(500): - from_account = random.choice(accounts[:50]) - to_account = random.choice(accounts) - amount = random.randint(1, 100) * 10**18 - - try: - rollup.submit_transaction( - from_address=from_account, - to_address=to_account, - amount=amount, - gas_limit=21000, - gas_price=20 * 10**9 - ) - except ValueError as e: - # Skip invalid transactions - pass - - # Run rollup - stats = await rollup.run_rollup(duration_seconds=60) - - # Print final stats - logger.info("\n=== Final Statistics ===") - for key, value in stats.items(): - logger.info(f"{key}: {value}") - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/research/prototypes/sharding/beacon_chain.py b/research/prototypes/sharding/beacon_chain.py deleted file mode 100644 index be808dbb..00000000 --- a/research/prototypes/sharding/beacon_chain.py +++ /dev/null @@ -1,356 +0,0 @@ -""" -Beacon Chain for Sharding Architecture -Coordinates shard chains and manages cross-shard transactions -""" - -import 
asyncio -import json -import hashlib -import time -from datetime import datetime, timedelta -from typing import Dict, List, Optional, Set -from dataclasses import dataclass, asdict -from enum import Enum -import random -import logging - -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) - - -class ShardStatus(Enum): - """Shard chain status""" - ACTIVE = "active" - SYNCING = "syncing" - OFFLINE = "offline" - - -@dataclass -class ShardInfo: - """Information about a shard""" - shard_id: int - status: ShardStatus - validator_count: int - last_checkpoint: int - gas_price: int - transaction_count: int - cross_shard_txs: int - - -@dataclass -class CrossShardTransaction: - """Cross-shard transaction""" - tx_hash: str - from_shard: int - to_shard: int - sender: str - receiver: str - amount: int - data: str - nonce: int - timestamp: datetime - status: str = "pending" - - -@dataclass -class Checkpoint: - """Beacon chain checkpoint""" - epoch: int - shard_roots: Dict[int, str] - cross_shard_roots: List[str] - validator_set: List[str] - timestamp: datetime - - -class BeaconChain: - """Beacon chain for coordinating shards""" - - def __init__(self, num_shards: int = 64): - self.num_shards = num_shards - self.shards: Dict[int, ShardInfo] = {} - self.current_epoch = 0 - self.checkpoints: List[Checkpoint] = [] - self.cross_shard_pool: List[CrossShardTransaction] = [] - self.validators: Set[str] = set() - self.randao = None - - # Initialize shards - self._initialize_shards() - - def _initialize_shards(self): - """Initialize all shards""" - for i in range(self.num_shards): - self.shards[i] = ShardInfo( - shard_id=i, - status=ShardStatus.ACTIVE, - validator_count=100, - last_checkpoint=0, - gas_price=20 * 10**9, # 20 gwei - transaction_count=0, - cross_shard_txs=0 - ) - - def add_validator(self, validator_address: str): - """Add a validator to the beacon chain""" - self.validators.add(validator_address) - logger.info(f"Added validator: {validator_address}") 
- - def remove_validator(self, validator_address: str): - """Remove a validator from the beacon chain""" - self.validators.discard(validator_address) - logger.info(f"Removed validator: {validator_address}") - - def get_shard_for_address(self, address: str) -> int: - """Determine which shard an address belongs to""" - hash_bytes = hashlib.sha256(address.encode()).digest() - shard_id = int.from_bytes(hash_bytes[:4], byteorder='big') % self.num_shards - return shard_id - - def submit_cross_shard_transaction( - self, - from_shard: int, - to_shard: int, - sender: str, - receiver: str, - amount: int, - data: str = "" - ) -> str: - """Submit a cross-shard transaction""" - - # Generate transaction hash - tx_data = { - "from_shard": from_shard, - "to_shard": to_shard, - "sender": sender, - "receiver": receiver, - "amount": amount, - "data": data, - "nonce": len(self.cross_shard_pool), - "timestamp": datetime.utcnow().isoformat() - } - - tx_hash = hashlib.sha256(json.dumps(tx_data, sort_keys=True).encode()).hexdigest() - - # Create cross-shard transaction - cross_tx = CrossShardTransaction( - tx_hash=tx_hash, - from_shard=from_shard, - to_shard=to_shard, - sender=sender, - receiver=receiver, - amount=amount, - data=data, - nonce=len(self.cross_shard_pool), - timestamp=datetime.utcnow() - ) - - # Add to pool - self.cross_shard_pool.append(cross_tx) - - # Update shard metrics - if from_shard in self.shards: - self.shards[from_shard].cross_shard_txs += 1 - if to_shard in self.shards: - self.shards[to_shard].cross_shard_txs += 1 - - logger.info(f"Submitted cross-shard tx {tx_hash[:8]} from shard {from_shard} to {to_shard}") - - return tx_hash - - async def process_cross_shard_transactions(self) -> List[str]: - """Process pending cross-shard transactions""" - processed = [] - - # Group transactions by destination shard - shard_groups = {} - for tx in self.cross_shard_pool: - if tx.status == "pending": - if tx.to_shard not in shard_groups: - shard_groups[tx.to_shard] = [] - 
shard_groups[tx.to_shard].append(tx) - - # Process each group - for shard_id, transactions in shard_groups.items(): - if len(transactions) > 0: - # Create batch for shard - batch_hash = self._create_batch_hash(transactions) - - # Submit to shard (simulated) - success = await self._submit_to_shard(shard_id, batch_hash, transactions) - - if success: - for tx in transactions: - tx.status = "processed" - processed.append(tx.tx_hash) - - logger.info(f"Processed {len(processed)} cross-shard transactions") - - return processed - - def _create_batch_hash(self, transactions: List[CrossShardTransaction]) -> str: - """Create hash for transaction batch""" - tx_hashes = [tx.tx_hash for tx in transactions] - combined = "".join(sorted(tx_hashes)) - return hashlib.sha256(combined.encode()).hexdigest() - - async def _submit_to_shard( - self, - shard_id: int, - batch_hash: str, - transactions: List[CrossShardTransaction] - ) -> bool: - """Submit batch to shard (simulated)""" - # Simulate network delay - await asyncio.sleep(0.01) - - # Simulate success rate - return random.random() > 0.05 # 95% success rate - - def create_checkpoint(self) -> Checkpoint: - """Create a new checkpoint""" - self.current_epoch += 1 - - # Collect shard roots (simulated) - shard_roots = {} - for shard_id in range(self.num_shards): - shard_roots[shard_id] = f"root_{shard_id}_{self.current_epoch}" - - # Collect cross-shard transaction roots - cross_shard_txs = [tx for tx in self.cross_shard_pool if tx.status == "processed"] - cross_shard_roots = [tx.tx_hash for tx in cross_shard_txs[-100:]] # Last 100 - - # Create checkpoint - checkpoint = Checkpoint( - epoch=self.current_epoch, - shard_roots=shard_roots, - cross_shard_roots=cross_shard_roots, - validator_set=list(self.validators), - timestamp=datetime.utcnow() - ) - - self.checkpoints.append(checkpoint) - - # Update shard checkpoint info - for shard_id in range(self.num_shards): - if shard_id in self.shards: - self.shards[shard_id].last_checkpoint = 
self.current_epoch - - logger.info(f"Created checkpoint {self.current_epoch} with {len(cross_shard_roots)} cross-shard txs") - - return checkpoint - - def get_shard_info(self, shard_id: int) -> Optional[ShardInfo]: - """Get information about a specific shard""" - return self.shards.get(shard_id) - - def get_all_shards(self) -> Dict[int, ShardInfo]: - """Get information about all shards""" - return self.shards.copy() - - def get_cross_shard_pool_size(self) -> int: - """Get number of pending cross-shard transactions""" - return len([tx for tx in self.cross_shard_pool if tx.status == "pending"]) - - def get_network_stats(self) -> Dict: - """Get network-wide statistics""" - total_txs = sum(shard.transaction_count for shard in self.shards.values()) - total_cross_txs = sum(shard.cross_shard_txs for shard in self.shards.values()) - avg_gas_price = sum(shard.gas_price for shard in self.shards.values()) / len(self.shards) - - return { - "epoch": self.current_epoch, - "total_shards": self.num_shards, - "active_shards": sum(1 for s in self.shards.values() if s.status == ShardStatus.ACTIVE), - "total_transactions": total_txs, - "cross_shard_transactions": total_cross_txs, - "pending_cross_shard": self.get_cross_shard_pool_size(), - "average_gas_price": avg_gas_price, - "validator_count": len(self.validators), - "checkpoints": len(self.checkpoints) - } - - async def run_epoch(self): - """Run a single epoch""" - logger.info(f"Starting epoch {self.current_epoch + 1}") - - # Process cross-shard transactions - await self.process_cross_shard_transactions() - - # Create checkpoint - self.create_checkpoint() - - # Randomly update shard metrics - for shard in self.shards.values(): - shard.transaction_count += random.randint(100, 1000) - shard.gas_price = max(10 * 10**9, shard.gas_price + random.randint(-5, 5) * 10**9) - - def simulate_load(self, duration_seconds: int = 60): - """Simulate network load""" - logger.info(f"Simulating load for {duration_seconds} seconds") - - start_time = 
time.time() - tx_count = 0 - - while time.time() - start_time < duration_seconds: - # Generate random cross-shard transactions - for _ in range(random.randint(5, 20)): - from_shard = random.randint(0, self.num_shards - 1) - to_shard = random.randint(0, self.num_shards - 1) - - if from_shard != to_shard: - self.submit_cross_shard_transaction( - from_shard=from_shard, - to_shard=to_shard, - sender=f"user_{random.randint(0, 9999)}", - receiver=f"user_{random.randint(0, 9999)}", - amount=random.randint(1, 1000) * 10**18, - data=f"transfer_{tx_count}" - ) - tx_count += 1 - - # Small delay - time.sleep(0.1) - - logger.info(f"Generated {tx_count} cross-shard transactions") - - return tx_count - - -async def main(): - """Main function to run beacon chain simulation""" - logger.info("Starting Beacon Chain Sharding Simulation") - - # Create beacon chain - beacon = BeaconChain(num_shards=64) - - # Add validators - for i in range(100): - beacon.add_validator(f"validator_{i:03d}") - - # Simulate initial load - beacon.simulate_load(duration_seconds=5) - - # Run epochs - for epoch in range(5): - await beacon.run_epoch() - - # Print stats - stats = beacon.get_network_stats() - logger.info(f"Epoch {epoch} Stats:") - logger.info(f" Total Transactions: {stats['total_transactions']}") - logger.info(f" Cross-Shard TXs: {stats['cross_shard_transactions']}") - logger.info(f" Pending Cross-Shard: {stats['pending_cross_shard']}") - logger.info(f" Active Shards: {stats['active_shards']}/{stats['total_shards']}") - - # Simulate more load - beacon.simulate_load(duration_seconds=2) - - # Print final stats - final_stats = beacon.get_network_stats() - logger.info("\n=== Final Network Statistics ===") - for key, value in final_stats.items(): - logger.info(f"{key}: {value}") - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/research/standards/eip-aitbc-receipts.md b/research/standards/eip-aitbc-receipts.md deleted file mode 100644 index 168e26cf..00000000 --- 
a/research/standards/eip-aitbc-receipts.md +++ /dev/null @@ -1,458 +0,0 @@ ---- -eip: 8XXX -title: AITBC Receipt Interoperability Standard -description: Standard format for AI/ML workload receipts enabling cross-chain verification and marketplace interoperability -author: AITBC Research Consortium -discussions-to: https://github.com/ethereum/EIPs/discussions/8XXX -status: Draft -type: Standards Track -category: ERC -created: 2024-01-XX -requires: 712, 191, 1155 ---- - -## Abstract - -This standard defines a universal format for AI/ML workload receipts that enables: -- Cross-chain verification of computation results -- Interoperability between decentralized AI marketplaces -- Standardized metadata for model inference and training -- Cryptographic proof verification across different blockchain networks -- Composable receipt-based workflows - -## Motivation - -The growing ecosystem of decentralized AI marketplaces and blockchain-based AI services lacks a standard for receipt representation. 
This leads to: -- Fragmented markets with incompatible receipt formats -- Difficulty in verifying computations across chains -- Limited composability between AI services -- Redundant implementations of similar functionality - -By establishing a universal receipt standard, we enable: -- Seamless cross-chain AI service integration -- Unified verification mechanisms -- Enhanced marketplace liquidity -- Reduced development overhead for AI service providers - -## Specification - -### Core Receipt Structure - -```solidity -interface IAITBCReceipt { - struct Receipt { - bytes32 receiptId; // Unique identifier - address provider; // Service provider - address client; // Client who requested - uint256 timestamp; // Execution timestamp - uint256 chainId; // Source chain ID - WorkloadType workloadType; // Type of AI workload - WorkloadMetadata metadata; // Workload-specific data - VerificationProof proof; // Cryptographic proof - bytes signature; // Provider signature - } - - enum WorkloadType { - INFERENCE, - TRAINING, - FINE_TUNING, - VALIDATION - } -} -``` - -### Workload Metadata - -```solidity -struct WorkloadMetadata { - string modelId; // Model identifier - string modelVersion; // Model version - bytes32 modelHash; // Model content hash - bytes32 inputHash; // Input data hash - bytes32 outputHash; // Output data hash - uint256 computeUnits; // Compute resources used - uint256 executionTime; // Execution time in ms - mapping(string => string) customFields; // Extensible metadata -} -``` - -### Verification Proof - -```solidity -struct VerificationProof { - ProofType proofType; // Type of proof - bytes proofData; // Proof bytes - bytes32[] publicInputs; // Public inputs - bytes32[] verificationKeys; // Verification keys - uint256 verificationGas; // Gas required for verification -} -``` - -### Cross-Chain Verification - -```solidity -interface ICrossChainVerifier { - event VerificationRequested( - bytes32 indexed receiptId, - uint256 fromChainId, - uint256 toChainId - ); 
- - event VerificationCompleted( - bytes32 indexed receiptId, - bool verified, - bytes32 crossChainId - ); - - function verifyReceipt( - Receipt calldata receipt, - uint256 targetChainId - ) external returns (bytes32 crossChainId); - - function submitCrossChainProof( - bytes32 crossChainId, - bytes calldata proof - ) external returns (bool verified); -} -``` - -### Marketplace Integration - -```solidity -interface IAITBCMarketplace { - function listService( - Service calldata service, - ReceiptTemplate calldata template - ) external returns (uint256 serviceId); - - function executeWorkload( - uint256 serviceId, - bytes calldata workloadData - ) external payable returns (Receipt memory receipt); - - function verifyAndSettle( - Receipt calldata receipt - ) external returns (bool settled); -} -``` - -### JSON Representation - -```json -{ - "receiptId": "0x...", - "provider": "0x...", - "client": "0x...", - "timestamp": 1704067200, - "chainId": 1, - "workloadType": "INFERENCE", - "metadata": { - "modelId": "gpt-4", - "modelVersion": "1.0.0", - "modelHash": "0x...", - "inputHash": "0x...", - "outputHash": "0x...", - "computeUnits": 1000, - "executionTime": 2500, - "customFields": { - "temperature": "0.7", - "maxTokens": "1000" - } - }, - "proof": { - "proofType": "ZK_SNARK", - "proofData": "0x...", - "publicInputs": ["0x..."], - "verificationKeys": ["0x..."], - "verificationGas": 50000 - }, - "signature": "0x..." -} -``` - -## Rationale - -### Design Decisions - -1. **Hierarchical Structure**: Receipt contains metadata and proof separately for flexibility -2. **Extensible Metadata**: Custom fields allow for workload-specific extensions -3. **Multiple Proof Types**: Supports ZK-SNARKs, STARKs, and optimistic rollups -4. **Chain Agnostic**: Works across EVM and non-EVM chains -5. **Backwards Compatible**: Builds on existing ERC standards - -### Trade-offs - -1. 
**Gas Costs**: Comprehensive metadata increases verification costs - - Mitigation: Optional fields and lazy verification -2. **Proof Size**: ZK proofs can be large - - Mitigation: Proof compression and aggregation -3. **Standardization vs Innovation**: Fixed format may limit innovation - - Mitigation: Versioning and extension mechanisms - -## Backwards Compatibility - -This standard is designed to be backwards compatible with: -- **ERC-712**: Typed data signing for receipts -- **ERC-1155**: Multi-token standard for representing receipts as NFTs -- **ERC-191**: Signed data standard for cross-chain verification - -Existing implementations can adopt this standard by: -1. Wrapping current receipt formats -2. Implementing adapter contracts -3. Using migration contracts for gradual transition - -## Security Considerations - -### Provider Misbehavior -- Providers must sign receipts cryptographically -- Slashing conditions for invalid proofs -- Reputation system integration - -### Cross-Chain Risks -- Replay attacks across chains -- Bridge security dependencies -- Finality considerations - -### Privacy Concerns -- Sensitive data in metadata -- Proof leakage risks -- Client privacy protection - -### Mitigations -1. **Cryptographic Guarantees**: All receipts signed by providers -2. **Economic Security**: Stake requirements for providers -3. **Privacy Options**: Zero-knowledge proofs for sensitive data -4. 
**Audit Trails**: Complete verification history - -## Implementation Guide - -### Basic Implementation - -```solidity -contract AITBCReceipt is IAITBCReceipt { - mapping(bytes32 => Receipt) public receipts; - mapping(address => uint256) public providerNonce; - - function createReceipt( - WorkloadType workloadType, - WorkloadMetadata calldata metadata, - VerificationProof calldata proof - ) external returns (bytes32 receiptId) { - require(providerNonce[msg.sender] == metadata.nonce); - - receiptId = keccak256( - abi.encodePacked( - msg.sender, - block.timestamp, - metadata.modelHash, - metadata.inputHash - ) - ); - - receipts[receiptId] = Receipt({ - receiptId: receiptId, - provider: msg.sender, - client: tx.origin, - timestamp: block.timestamp, - chainId: block.chainid, - workloadType: workloadType, - metadata: metadata, - proof: proof, - signature: new bytes(0) - }); - - providerNonce[msg.sender]++; - emit ReceiptCreated(receiptId, msg.sender); - } -} -``` - -### Cross-Chain Bridge Implementation - -```solidity -contract AITBCBridge is ICrossChainVerifier { - mapping(bytes32 => CrossChainVerification) public verifications; - - function verifyReceipt( - Receipt calldata receipt, - uint256 targetChainId - ) external override returns (bytes32 crossChainId) { - crossChainId = keccak256( - abi.encodePacked( - receipt.receiptId, - targetChainId, - block.timestamp - ) - ); - - verifications[crossChainId] = CrossChainVerification({ - receiptId: receipt.receiptId, - fromChainId: receipt.chainId, - toChainId: targetChainId, - timestamp: block.timestamp, - status: VerificationStatus.PENDING - }); - - emit VerificationRequested(receipt.receiptId, receipt.chainId, targetChainId); - } -} -``` - -## Test Cases - -### Test Case 1: Basic Receipt Creation -```solidity -function testCreateReceipt() public { - WorkloadMetadata memory metadata = WorkloadMetadata({ - modelId: "test-model", - modelVersion: "1.0.0", - modelHash: keccak256("model"), - inputHash: keccak256("input"), - 
outputHash: keccak256("output"), - computeUnits: 100, - executionTime: 1000, - customFields: new mapping(string => string) - }); - - bytes32 receiptId = receiptContract.createReceipt( - WorkloadType.INFERENCE, - metadata, - proof - ); - - assertTrue(receiptId != bytes32(0)); -} -``` - -### Test Case 2: Cross-Chain Verification -```solidity -function testCrossChainVerification() public { - bytes32 crossChainId = bridge.verifyReceipt(receipt, targetChain); - - assertEq(bridge.getVerificationStatus(crossChainId), VerificationStatus.PENDING); - - // Submit proof on target chain - bool verified = bridgeTarget.submitCrossChainProof( - crossChainId, - crossChainProof - ); - - assertTrue(verified); -} -``` - -## Reference Implementation - -A full reference implementation is available at: -- GitHub: https://github.com/aitbc/receipt-standard -- npm: @aitbc/receipt-standard -- Documentation: https://docs.aitbc.io/receipt-standard - -## Industry Adoption - -### Current Supporters -- [List of supporting organizations] -- [Implemented marketplaces] -- [Tooling providers] - -### Integration Examples -1. **Ethereum Mainnet**: Full implementation with ZK proofs -2. **Polygon**: Optimistic rollup integration -3. **Arbitrum**: STARK-based verification -4. **Cosmos**: IBC integration for cross-chain - -### Migration Path -1. Phase 1: Adapter contracts for existing formats -2. Phase 2: Hybrid implementations -3. Phase 3: Full standard adoption - -## Future Extensions - -### Planned Enhancements -1. **Recursive Proofs**: Nested receipt verification -2. **Batch Verification**: Multiple receipts in one proof -3. **Dynamic Pricing**: Market-based verification costs -4. **AI Model Registry**: On-chain model verification - -### Potential Standards -1. **EIP-XXXX**: AI Model Registry Standard -2. **EIP-XXXX**: Cross-Chain AI Service Protocol -3. **EIP-XXXX**: Decentralized AI Oracles - -## Copyright - -Copyright and related rights waived via CC0. 
- ---- - -## Appendix A: Full Interface Definition - -```solidity -// SPDX-License-Identifier: CC0-1.0 -pragma solidity ^0.8.0; - -interface IAITBCReceipt { - // Structs - struct Receipt { - bytes32 receiptId; - address provider; - address client; - uint256 timestamp; - uint256 chainId; - WorkloadType workloadType; - WorkloadMetadata metadata; - VerificationProof proof; - bytes signature; - } - - struct WorkloadMetadata { - string modelId; - string modelVersion; - bytes32 modelHash; - bytes32 inputHash; - bytes32 outputHash; - uint256 computeUnits; - uint256 executionTime; - mapping(string => string) customFields; - } - - struct VerificationProof { - ProofType proofType; - bytes proofData; - bytes32[] publicInputs; - bytes32[] verificationKeys; - uint256 verificationGas; - } - - // Enums - enum WorkloadType { INFERENCE, TRAINING, FINE_TUNING, VALIDATION } - enum ProofType { ZK_SNARK, ZK_STARK, OPTIMISTIC, TRUSTED } - - // Events - event ReceiptCreated(bytes32 indexed receiptId, address indexed provider); - event ReceiptVerified(bytes32 indexed receiptId, bool verified); - event ReceiptRevoked(bytes32 indexed receiptId, string reason); - - // Functions - function createReceipt( - WorkloadType workloadType, - WorkloadMetadata calldata metadata, - VerificationProof calldata proof - ) external returns (bytes32 receiptId); - - function verifyReceipt(bytes32 receiptId) external returns (bool verified); - - function revokeReceipt(bytes32 receiptId, string calldata reason) external; - - function getReceipt(bytes32 receiptId) external view returns (Receipt memory); -} -``` - -## Appendix B: Version History - -| Version | Date | Changes | -|---------|------|---------| -| 1.0.0 | 2024-01-XX | Initial draft | -| 1.0.1 | 2024-02-XX | Added cross-chain verification | -| 1.1.0 | 2024-03-XX | Added batch verification support | -| 1.2.0 | 2024-04-XX | Enhanced privacy features | diff --git a/scripts/gpu/gpu_miner_demo.py b/scripts/gpu/gpu_miner_demo.py deleted file mode 100644 
index 94c2684d..00000000 --- a/scripts/gpu/gpu_miner_demo.py +++ /dev/null @@ -1,60 +0,0 @@ -#!/usr/bin/env python3 -""" -GPU Miner Registration Demo -Shows what data would be sent to register the GPU -""" - -import json -from datetime import datetime - -# GPU Information from nvidia-smi -GPU_INFO = { - "miner_id": "localhost-gpu-miner", - "capabilities": { - "gpu": { - "model": "NVIDIA GeForce RTX 4060 Ti", - "memory_gb": 16, - "cuda_version": "12.4", - "compute_capability": "8.9", - "driver_version": "550.163.01" - }, - "compute": { - "type": "GPU", - "platform": "CUDA", - "supported_tasks": ["inference", "training", "stable-diffusion", "llama"], - "max_concurrent_jobs": 1 - } - }, - "concurrency": 1, - "region": "localhost" -} - -print("=== GPU Miner Registration Data ===") -print(json.dumps(GPU_INFO, indent=2)) -print("\n=== Registration Endpoint ===") -print("POST http://localhost:8000/miners/register") -print("\n=== Headers ===") -print("Authorization: Bearer REDACTED_MINER_KEY") -print("Content-Type: application/json") -print("\n=== Response Expected ===") -print(""" -{ - "status": "ok", - "session_token": "abc123..." -} -""") - -print("\n=== Current GPU Status ===") -print(f"Model: NVIDIA GeForce RTX 4060 Ti") -print(f"Memory: 16GB (2682MB/16380MB used)") -print(f"Utilization: 9%") -print(f"Temperature: 43°C") -print(f"Status: Available for mining") - -print("\n=== To Start the GPU Miner ===") -print("1. Ensure coordinator API is running on port 8000") -print("2. Run: python simple_gpu_miner.py") -print("3. 
The miner will:") -print(" - Register GPU capabilities") -print(" - Send heartbeats every 15 seconds") -print(" - Poll for jobs every 3 seconds") diff --git a/scripts/gpu/gpu_miner_real.py b/scripts/gpu/gpu_miner_real.py deleted file mode 100644 index d5334e0d..00000000 --- a/scripts/gpu/gpu_miner_real.py +++ /dev/null @@ -1,329 +0,0 @@ -#!/usr/bin/env python3 -""" -Real GPU Miner Client for AITBC with Ollama integration -""" - -import json -import time -import httpx -import logging -import sys -import subprocess -import os -from datetime import datetime - -# Configuration -COORDINATOR_URL = "http://127.0.0.1:8000" -MINER_ID = "localhost-gpu-miner" -AUTH_TOKEN = "REDACTED_MINER_KEY" -HEARTBEAT_INTERVAL = 15 -MAX_RETRIES = 10 -RETRY_DELAY = 30 - -# Setup logging -logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') -logger = logging.getLogger(__name__) - -# GPU capabilities (RTX 4060 Ti) -GPU_CAPABILITIES = { - "gpu": { - "model": "NVIDIA GeForce RTX 4060 Ti", - "memory_gb": 16, - "cuda_version": "12.4", - "platform": "CUDA", - "supported_tasks": ["inference", "training", "stable-diffusion", "llama"], - "max_concurrent_jobs": 1 - } -} - -def check_gpu_available(): - """Check if GPU is available""" - try: - result = subprocess.run(['nvidia-smi', '--query-gpu=name,memory.total', '--format=csv,noheader,nounits'], - capture_output=True, text=True, timeout=5) - if result.returncode == 0: - gpu_info = result.stdout.strip().split(', ') - logger.info(f"GPU detected: {gpu_info[0]}, Memory: {gpu_info[1]}MB") - return True - else: - logger.error("nvidia-smi failed") - return False - except Exception as e: - logger.error(f"GPU check failed: {e}") - return False - -def check_ollama(): - """Check if Ollama is running""" - try: - response = httpx.get("http://localhost:11434/api/tags", timeout=5) - if response.status_code == 200: - models = response.json().get('models', []) - logger.info(f"Ollama running with {len(models)} models") - return 
True - else: - logger.error("Ollama not responding") - return False - except Exception as e: - logger.error(f"Ollama check failed: {e}") - return False - -def wait_for_coordinator(): - """Wait for coordinator to be available""" - for i in range(MAX_RETRIES): - try: - response = httpx.get(f"{COORDINATOR_URL}/v1/health", timeout=5) - if response.status_code == 200: - logger.info("Coordinator is available!") - return True - except: - pass - - logger.info(f"Waiting for coordinator... ({i+1}/{MAX_RETRIES})") - time.sleep(RETRY_DELAY) - - logger.error("Coordinator not available after max retries") - return False - -def register_miner(): - """Register the miner with the coordinator""" - register_data = { - "capabilities": GPU_CAPABILITIES, - "concurrency": 1, - "region": "localhost" - } - - headers = { - "X-Api-Key": AUTH_TOKEN, - "Content-Type": "application/json" - } - - try: - response = httpx.post( - f"{COORDINATOR_URL}/v1/miners/register?miner_id={MINER_ID}", - json=register_data, - headers=headers, - timeout=10 - ) - - if response.status_code == 200: - data = response.json() - logger.info(f"Successfully registered miner: {data}") - return data.get("session_token", "demo-token") - else: - logger.error(f"Registration failed: {response.status_code} - {response.text}") - return None - - except Exception as e: - logger.error(f"Registration error: {e}") - return None - -def send_heartbeat(): - """Send heartbeat to coordinator""" - heartbeat_data = { - "status": "active", - "current_jobs": 0, - "last_seen": datetime.utcnow().isoformat(), - "gpu_utilization": 45, # Simulated - "memory_used": 8192, # Simulated - } - - headers = { - "X-Api-Key": AUTH_TOKEN, - "Content-Type": "application/json" - } - - try: - response = httpx.post( - f"{COORDINATOR_URL}/v1/miners/heartbeat?miner_id={MINER_ID}", - json=heartbeat_data, - headers=headers, - timeout=5 - ) - - if response.status_code == 200: - logger.info("Heartbeat sent successfully") - else: - logger.error(f"Heartbeat failed: 
{response.status_code} - {response.text}") - - except Exception as e: - logger.error(f"Heartbeat error: {e}") - -def execute_job(job): - """Execute a job using GPU resources""" - job_id = job.get('job_id') - payload = job.get('payload', {}) - - logger.info(f"Executing job {job_id}: {payload}") - - try: - if payload.get('type') == 'inference': - # Use Ollama for inference - prompt = payload.get('prompt', '') - model = payload.get('model', 'llama3.2:latest') - - # Call Ollama API - ollama_response = httpx.post( - "http://localhost:11434/api/generate", - json={ - "model": model, - "prompt": prompt, - "stream": False - }, - timeout=60 - ) - - if ollama_response.status_code == 200: - result = ollama_response.json() - output = result.get('response', '') - - # Submit result back to coordinator - submit_result(job_id, { - "status": "completed", - "output": output, - "model": model, - "tokens_processed": result.get('eval_count', 0), - "execution_time": result.get('total_duration', 0) / 1000000000, # Convert to seconds - "gpu_used": True - }) - - logger.info(f"Job {job_id} completed successfully") - return True - else: - logger.error(f"Ollama error: {ollama_response.status_code}") - submit_result(job_id, { - "status": "failed", - "error": f"Ollama error: {ollama_response.text}" - }) - return False - else: - # Unsupported job type - logger.error(f"Unsupported job type: {payload.get('type')}") - submit_result(job_id, { - "status": "failed", - "error": f"Unsupported job type: {payload.get('type')}" - }) - return False - - except Exception as e: - logger.error(f"Job execution error: {e}") - submit_result(job_id, { - "status": "failed", - "error": str(e) - }) - return False - -def submit_result(job_id, result): - """Submit job result to coordinator""" - headers = { - "X-Api-Key": AUTH_TOKEN, - "Content-Type": "application/json" - } - - try: - response = httpx.post( - f"{COORDINATOR_URL}/v1/jobs/{job_id}/result", - json=result, - headers=headers, - timeout=10 - ) - - if 
response.status_code == 200: - logger.info(f"Result submitted for job {job_id}") - else: - logger.error(f"Result submission failed: {response.status_code} - {response.text}") - - except Exception as e: - logger.error(f"Result submission error: {e}") - -def poll_for_jobs(): - """Poll for available jobs""" - poll_data = { - "miner_id": MINER_ID, - "capabilities": GPU_CAPABILITIES, - "max_wait_seconds": 5 - } - - headers = { - "X-Api-Key": AUTH_TOKEN, - "Content-Type": "application/json" - } - - try: - response = httpx.post( - f"{COORDINATOR_URL}/v1/miners/poll", - json=poll_data, - headers=headers, - timeout=10 - ) - - if response.status_code == 200: - job = response.json() - logger.info(f"Received job: {job}") - return job - elif response.status_code == 204: - logger.info("No jobs available") - return None - else: - logger.error(f"Poll failed: {response.status_code} - {response.text}") - return None - - except Exception as e: - logger.error(f"Error polling for jobs: {e}") - return None - -def main(): - """Main miner loop""" - logger.info("Starting Real GPU Miner Client...") - - # Check GPU availability (optional) - gpu_available = check_gpu_available() - if not gpu_available: - logger.warning("GPU not available - will run in CPU mode") - - # Check Ollama - if not check_ollama(): - logger.warning("Ollama not available - inference jobs will fail") - - # Wait for coordinator - if not wait_for_coordinator(): - sys.exit(1) - - # Register with coordinator - session_token = register_miner() - if not session_token: - logger.error("Failed to register, exiting") - sys.exit(1) - - logger.info("Miner registered successfully, starting main loop...") - - # Main loop - last_heartbeat = 0 - last_poll = 0 - - try: - while True: - current_time = time.time() - - # Send heartbeat - if current_time - last_heartbeat >= HEARTBEAT_INTERVAL: - send_heartbeat() - last_heartbeat = current_time - - # Poll for jobs - if current_time - last_poll >= 3: - job = poll_for_jobs() - if job: - # 
Execute the job - execute_job(job) - last_poll = current_time - - time.sleep(1) - - except KeyboardInterrupt: - logger.info("Shutting down miner...") - except Exception as e: - logger.error(f"Error in main loop: {e}") - sys.exit(1) - -if __name__ == "__main__": - main() diff --git a/scripts/gpu/gpu_miner_simple.py b/scripts/gpu/gpu_miner_simple.py deleted file mode 100644 index 66f76b41..00000000 --- a/scripts/gpu/gpu_miner_simple.py +++ /dev/null @@ -1,299 +0,0 @@ -#!/usr/bin/env python3 -""" -Simple GPU Miner Client for AITBC - simulates GPU work -""" - -import json -import time -import httpx -import logging -import sys -import subprocess -from datetime import datetime - -# Configuration -COORDINATOR_URL = "http://127.0.0.1:8000" -MINER_ID = "localhost-gpu-miner" -AUTH_TOKEN = "REDACTED_MINER_KEY" -HEARTBEAT_INTERVAL = 15 -MAX_RETRIES = 10 -RETRY_DELAY = 30 - -# Setup logging -logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') -logger = logging.getLogger(__name__) - -# GPU capabilities (simulated) -GPU_CAPABILITIES = { - "gpu": { - "model": "NVIDIA GeForce RTX 4060 Ti", - "memory_gb": 16, - "cuda_version": "12.4", - "platform": "CUDA", - "supported_tasks": ["inference", "training", "stable-diffusion", "llama"], - "max_concurrent_jobs": 1 - } -} - -def simulate_gpu_work(prompt, duration=3): - """Simulate GPU processing work""" - logger.info(f"Simulating GPU work for: '{prompt}'") - - # Simulate processing time - time.sleep(duration) - - # Generate a simple response based on the prompt - if "hello" in prompt.lower(): - response = "Hello! I'm an AI assistant running on the AITBC network. Your request was processed by a GPU miner." - elif "ai" in prompt.lower(): - response = "AI (Artificial Intelligence) is the simulation of human intelligence in machines that are programmed to think and learn." 
- elif "blockchain" in prompt.lower(): - response = "Blockchain is a distributed ledger technology that maintains a secure and decentralized record of transactions." - else: - response = f"Processed request: {prompt}. This is a simulated GPU response from the AITBC network." - - return response - -def wait_for_coordinator(): - """Wait for coordinator to be available""" - for i in range(MAX_RETRIES): - try: - response = httpx.get(f"{COORDINATOR_URL}/v1/health", timeout=5) - if response.status_code == 200: - logger.info("Coordinator is available!") - return True - except: - pass - - logger.info(f"Waiting for coordinator... ({i+1}/{MAX_RETRIES})") - time.sleep(RETRY_DELAY) - - logger.error("Coordinator not available after max retries") - return False - -def register_miner(): - """Register the miner with the coordinator""" - register_data = { - "capabilities": GPU_CAPABILITIES, - "concurrency": 1, - "region": "localhost" - } - - headers = { - "X-Api-Key": AUTH_TOKEN, - "Content-Type": "application/json" - } - - try: - response = httpx.post( - f"{COORDINATOR_URL}/v1/miners/register?miner_id={MINER_ID}", - json=register_data, - headers=headers, - timeout=10 - ) - - if response.status_code == 200: - data = response.json() - logger.info(f"Successfully registered miner: {data}") - return data.get("session_token", "demo-token") - else: - logger.error(f"Registration failed: {response.status_code} - {response.text}") - return None - - except Exception as e: - logger.error(f"Registration error: {e}") - return None - -def send_heartbeat(): - """Send heartbeat to coordinator""" - heartbeat_data = { - "status": "active", - "current_jobs": 0, - "last_seen": datetime.utcnow().isoformat(), - "gpu_utilization": 45, # Simulated - "memory_used": 8192, # Simulated - } - - headers = { - "X-Api-Key": AUTH_TOKEN, - "Content-Type": "application/json" - } - - try: - response = httpx.post( - f"{COORDINATOR_URL}/v1/miners/heartbeat?miner_id={MINER_ID}", - json=heartbeat_data, - headers=headers, 
- timeout=5 - ) - - if response.status_code == 200: - logger.info("Heartbeat sent successfully") - else: - logger.error(f"Heartbeat failed: {response.status_code} - {response.text}") - - except Exception as e: - logger.error(f"Heartbeat error: {e}") - -def execute_job(job): - """Execute a job using simulated GPU processing""" - job_id = job.get('job_id') - payload = job.get('payload', {}) - - logger.info(f"Executing job {job_id}: {payload}") - - try: - if payload.get('type') == 'inference': - # Get the prompt - prompt = payload.get('prompt', '') - - # Simulate GPU processing - logger.info(f"Processing with GPU...") - result_text = simulate_gpu_work(prompt, duration=3) - - # Submit result back to coordinator - submit_result(job_id, { - "result": { - "status": "completed", - "output": result_text, - "model": "simulated-gpu", - "tokens_processed": len(result_text.split()), - "execution_time": 3.0, - "gpu_used": True - }, - "metrics": { - "gpu_utilization": 85, - "memory_used": 2048, - "power_consumption": 250 - } - }) - - logger.info(f"Job {job_id} completed successfully") - return True - else: - # Unsupported job type - logger.error(f"Unsupported job type: {payload.get('type')}") - submit_result(job_id, { - "result": { - "status": "failed", - "error": f"Unsupported job type: {payload.get('type')}" - } - }) - return False - - except Exception as e: - logger.error(f"Job execution error: {e}") - submit_result(job_id, { - "result": { - "status": "failed", - "error": str(e) - } - }) - return False - -def submit_result(job_id, result): - """Submit job result to coordinator""" - headers = { - "X-Api-Key": AUTH_TOKEN, - "Content-Type": "application/json" - } - - try: - response = httpx.post( - f"{COORDINATOR_URL}/v1/miners/{job_id}/result", - json=result, - headers=headers, - timeout=10 - ) - - if response.status_code == 200: - logger.info(f"Result submitted for job {job_id}") - else: - logger.error(f"Result submission failed: {response.status_code} - {response.text}") - - 
except Exception as e: - logger.error(f"Result submission error: {e}") - -def poll_for_jobs(): - """Poll for available jobs""" - poll_data = { - "miner_id": MINER_ID, - "capabilities": GPU_CAPABILITIES, - "max_wait_seconds": 5 - } - - headers = { - "X-Api-Key": AUTH_TOKEN, - "Content-Type": "application/json" - } - - try: - response = httpx.post( - f"{COORDINATOR_URL}/v1/miners/poll", - json=poll_data, - headers=headers, - timeout=10 - ) - - if response.status_code == 200: - job = response.json() - logger.info(f"Received job: {job}") - return job - elif response.status_code == 204: - logger.info("No jobs available") - return None - else: - logger.error(f"Poll failed: {response.status_code} - {response.text}") - return None - - except Exception as e: - logger.error(f"Error polling for jobs: {e}") - return None - -def main(): - """Main miner loop""" - logger.info("Starting Simple GPU Miner Client...") - - # Wait for coordinator - if not wait_for_coordinator(): - sys.exit(1) - - # Register with coordinator - session_token = register_miner() - if not session_token: - logger.error("Failed to register, exiting") - sys.exit(1) - - logger.info("Miner registered successfully, starting main loop...") - - # Main loop - last_heartbeat = 0 - last_poll = 0 - - try: - while True: - current_time = time.time() - - # Send heartbeat - if current_time - last_heartbeat >= HEARTBEAT_INTERVAL: - send_heartbeat() - last_heartbeat = current_time - - # Poll for jobs - if current_time - last_poll >= 3: - job = poll_for_jobs() - if job: - # Execute the job - execute_job(job) - last_poll = current_time - - time.sleep(1) - - except KeyboardInterrupt: - logger.info("Shutting down miner...") - except Exception as e: - logger.error(f"Error in main loop: {e}") - sys.exit(1) - -if __name__ == "__main__": - main() diff --git a/scripts/gpu/gpu_miner_with_wait.py b/scripts/gpu/gpu_miner_with_wait.py deleted file mode 100644 index b48a2983..00000000 --- a/scripts/gpu/gpu_miner_with_wait.py +++ /dev/null 
@@ -1,210 +0,0 @@ -#!/usr/bin/env python3 -""" -GPU Miner Client with retry logic for AITBC -""" - -import json -import time -import httpx -import logging -import sys -from datetime import datetime - -# Configuration -COORDINATOR_URL = "http://127.0.0.1:8000" -MINER_ID = "localhost-gpu-miner" -AUTH_TOKEN = "REDACTED_MINER_KEY" -HEARTBEAT_INTERVAL = 15 -MAX_RETRIES = 10 -RETRY_DELAY = 30 - -# Setup logging -logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') -logger = logging.getLogger(__name__) - -# GPU capabilities (RTX 4060 Ti) -GPU_CAPABILITIES = { - "gpu": { - "model": "NVIDIA GeForce RTX 4060 Ti", - "memory_gb": 16, - "cuda_version": "12.4", - "compute_capability": "8.9", - "driver_version": "550.163.01" - }, - "compute": { - "type": "GPU", - "platform": "CUDA", - "supported_tasks": ["inference", "training", "stable-diffusion", "llama"], - "max_concurrent_jobs": 1 - } -} - -def wait_for_coordinator(): - """Wait for coordinator to be available""" - for i in range(MAX_RETRIES): - try: - response = httpx.get(f"{COORDINATOR_URL}/v1/health", timeout=5) - if response.status_code == 200: - logger.info("Coordinator is available!") - return True - except: - pass - - logger.info(f"Waiting for coordinator... 
({i+1}/{MAX_RETRIES})") - time.sleep(RETRY_DELAY) - - logger.error("Coordinator not available after max retries") - return False - -def register_miner(): - """Register the miner with the coordinator""" - register_data = { - "capabilities": GPU_CAPABILITIES, - "concurrency": 1, - "region": "localhost" - } - - headers = { - "X-Api-Key": AUTH_TOKEN, - "Content-Type": "application/json" - } - - try: - response = httpx.post( - f"{COORDINATOR_URL}/v1/miners/register?miner_id={MINER_ID}", - json=register_data, - headers=headers, - timeout=10 - ) - - if response.status_code == 200: - data = response.json() - logger.info(f"Successfully registered miner: {data}") - # Don't require session_token for demo registry - return data.get("session_token", "demo-token") - else: - logger.error(f"Registration failed: {response.status_code} - {response.text}") - return None - - except Exception as e: - logger.error(f"Error registering miner: {e}") - return None - -def send_heartbeat(): - """Send heartbeat to coordinator""" - heartbeat_data = { - "inflight": 0, - "status": "ONLINE", - "metadata": { - "last_seen": datetime.utcnow().isoformat(), - "gpu_utilization": 9, - "gpu_memory_used": 2682, - "gpu_temperature": 43 - } - } - - headers = { - "X-Api-Key": AUTH_TOKEN, - "Content-Type": "application/json" - } - - try: - response = httpx.post( - f"{COORDINATOR_URL}/v1/miners/heartbeat?miner_id={MINER_ID}", - json=heartbeat_data, - headers=headers, - timeout=5 - ) - - if response.status_code == 200: - logger.info("Heartbeat sent successfully") - else: - logger.error(f"Heartbeat failed: {response.status_code} - {response.text}") - - except Exception as e: - logger.error(f"Error sending heartbeat: {e}") - -def poll_for_jobs(): - """Poll for available jobs""" - poll_data = { - "max_wait_seconds": 5 - } - - headers = { - "X-Api-Key": AUTH_TOKEN, - "Content-Type": "application/json" - } - - try: - response = httpx.post( - f"{COORDINATOR_URL}/v1/miners/poll", - json=poll_data, - headers=headers, - 
timeout=10 - ) - - if response.status_code == 200: - job = response.json() - logger.info(f"Received job: {job}") - return job - elif response.status_code == 204: - logger.info("No jobs available") - return None - elif response.status_code in (404, 405): - # Coordinator/registry may not implement job polling (e.g. demo registry). - # Keep running (heartbeats still work) but don't spam error logs. - return None - else: - logger.error(f"Poll failed: {response.status_code} - {response.text}") - return None - - except Exception as e: - logger.error(f"Error polling for jobs: {e}") - return None - -def main(): - """Main miner loop""" - logger.info("Starting GPU Miner Client...") - - # Wait for coordinator - if not wait_for_coordinator(): - sys.exit(1) - - # Register with coordinator - session_token = register_miner() - if not session_token: - logger.error("Failed to register, exiting") - sys.exit(1) - - logger.info("Miner registered successfully, starting main loop...") - - # Main loop - last_heartbeat = 0 - last_poll = 0 - - try: - while True: - current_time = time.time() - - # Send heartbeat - if current_time - last_heartbeat >= HEARTBEAT_INTERVAL: - send_heartbeat() - last_heartbeat = current_time - - # Poll for jobs - if current_time - last_poll >= 3: - job = poll_for_jobs() - if job: - logger.info(f"Would execute job: {job}") - last_poll = current_time - - time.sleep(1) - - except KeyboardInterrupt: - logger.info("Shutting down miner...") - except Exception as e: - logger.error(f"Error in main loop: {e}") - sys.exit(1) - -if __name__ == "__main__": - main() diff --git a/scripts/gpu/simple_gpu_miner.py b/scripts/gpu/simple_gpu_miner.py deleted file mode 100644 index 0bc1a3a0..00000000 --- a/scripts/gpu/simple_gpu_miner.py +++ /dev/null @@ -1,182 +0,0 @@ -#!/usr/bin/env python3 -""" -Simple GPU Miner Client for AITBC -Registers GPU with coordinator and sends heartbeats -""" - -import json -import time -import httpx -import logging -from datetime import datetime - -# 
Configuration -COORDINATOR_URL = "http://localhost:8000" -MINER_ID = "localhost-gpu-miner" -AUTH_TOKEN = "REDACTED_MINER_KEY" -HEARTBEAT_INTERVAL = 15 - -# Setup logging -logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') -logger = logging.getLogger(__name__) - -# GPU capabilities (RTX 4060 Ti) -GPU_CAPABILITIES = { - "gpu": { - "model": "NVIDIA GeForce RTX 4060 Ti", - "memory_gb": 16, - "cuda_version": "12.4", - "compute_capability": "8.9", - "driver_version": "550.163.01" - }, - "compute": { - "type": "GPU", - "platform": "CUDA", - "supported_tasks": ["inference", "training", "stable-diffusion", "llama"], - "max_concurrent_jobs": 1 - } -} - -def register_miner(): - """Register the miner with the coordinator""" - register_data = { - "capabilities": GPU_CAPABILITIES, - "concurrency": 1, - "region": "localhost" - } - - headers = { - "Authorization": f"Bearer {AUTH_TOKEN}", - "Content-Type": "application/json" - } - - try: - response = httpx.post( - f"{COORDINATOR_URL}/miners/register", - json=register_data, - headers=headers, - timeout=10 - ) - - if response.status_code == 200: - data = response.json() - logger.info(f"Successfully registered miner: {data}") - return data.get("session_token") - else: - logger.error(f"Registration failed: {response.status_code} - {response.text}") - return None - - except Exception as e: - logger.error(f"Error registering miner: {e}") - return None - -def send_heartbeat(): - """Send heartbeat to coordinator""" - heartbeat_data = { - "inflight": 0, - "status": "ONLINE", - "metadata": { - "last_seen": datetime.utcnow().isoformat(), - "gpu_utilization": 9, # Current GPU utilization from nvidia-smi - "gpu_memory_used": 2682, # MB - "gpu_temperature": 43 - } - } - - headers = { - "Authorization": f"Bearer {AUTH_TOKEN}", - "Content-Type": "application/json" - } - - try: - response = httpx.post( - f"{COORDINATOR_URL}/miners/heartbeat", - json=heartbeat_data, - headers=headers, - timeout=5 - ) - - if 
response.status_code == 200: - logger.info("Heartbeat sent successfully") - else: - logger.error(f"Heartbeat failed: {response.status_code} - {response.text}") - - except Exception as e: - logger.error(f"Error sending heartbeat: {e}") - -def poll_for_jobs(): - """Poll for available jobs""" - poll_data = { - "max_wait_seconds": 5 - } - - headers = { - "Authorization": f"Bearer {AUTH_TOKEN}", - "Content-Type": "application/json" - } - - try: - response = httpx.post( - f"{COORDINATOR_URL}/miners/poll", - json=poll_data, - headers=headers, - timeout=10 - ) - - if response.status_code == 200: - job = response.json() - logger.info(f"Received job: {job}") - return job - elif response.status_code == 204: - logger.info("No jobs available") - return None - else: - logger.error(f"Poll failed: {response.status_code} - {response.text}") - return None - - except Exception as e: - logger.error(f"Error polling for jobs: {e}") - return None - -def main(): - """Main miner loop""" - logger.info("Starting GPU Miner Client...") - - # Register with coordinator - session_token = register_miner() - if not session_token: - logger.error("Failed to register, exiting") - return - - logger.info("Miner registered successfully, starting main loop...") - - # Main loop - last_heartbeat = 0 - last_poll = 0 - - try: - while True: - current_time = time.time() - - # Send heartbeat - if current_time - last_heartbeat >= HEARTBEAT_INTERVAL: - send_heartbeat() - last_heartbeat = current_time - - # Poll for jobs - if current_time - last_poll >= 3: # Poll every 3 seconds - job = poll_for_jobs() - if job: - # TODO: Execute job - logger.info(f"Would execute job: {job}") - last_poll = current_time - - time.sleep(1) - - except KeyboardInterrupt: - logger.info("Shutting down miner...") - except Exception as e: - logger.error(f"Error in main loop: {e}") - -if __name__ == "__main__": - main() diff --git a/windsurf/README.md b/windsurf/README.md deleted file mode 100644 index 1415cb0d..00000000 --- 
a/windsurf/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Windsurf Workspace Assets - -This directory tracks Windsurf-specific prompts, task flows, and workspace settings used to bootstrap and automate development tasks for the AITBC monorepo. diff --git a/windsurf/settings.json b/windsurf/settings.json deleted file mode 100644 index 23d2eba0..00000000 --- a/windsurf/settings.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "editor.formatOnSave": true, - "python.analysis.typeCheckingMode": "basic", - "terminal.integrated.defaultProfile.linux": "zsh" -}