refactor(domain): standardize metadata field naming to meta_data across all models

- Rename metadata fields to meta_data for consistency across domain models
- Update agent_identity, agent_performance, agent_portfolio, amm, analytics, bounty, certification, community, cross_chain_bridge, cross_chain_reputation, decentralized_memory, miner, pricing_models, trading, and wallet models
- Rename chain_metadata to chain_meta_data in CrossChainMapping
- Rename verification_metadata to verification_meta_data
This commit is contained in:
oib
2026-03-03 15:01:48 +01:00
parent f353e00172
commit cabbd2d9b7
34 changed files with 433 additions and 106 deletions

View File

@@ -109,9 +109,9 @@ class PoAProposer:
return return
now = datetime.utcnow() now = datetime.utcnow()
elapsed = (now - head.timestamp).total_seconds() elapsed = (now - head.timestamp).total_seconds()
sleep_for = max(self._config.interval_seconds - elapsed, 0) sleep_for = max(self._config.interval_seconds - elapsed, 0.1)
if sleep_for <= 0: if sleep_for <= 0:
return sleep_for = 0.1
try: try:
await asyncio.wait_for(self._stop_event.wait(), timeout=sleep_for) await asyncio.wait_for(self._stop_event.wait(), timeout=sleep_for)
except asyncio.TimeoutError: except asyncio.TimeoutError:

View File

@@ -0,0 +1,31 @@
"""
Logging utilities for AITBC coordinator API
"""
import logging
import sys
from typing import Optional
def setup_logger(
    name: str,
    level: str = "INFO",
    format_string: Optional[str] = None
) -> logging.Logger:
    """Setup a logger with consistent formatting.

    Args:
        name: Logger name (typically the caller's ``__name__``).
        level: Logging level name, case-insensitive (e.g. ``"info"``, ``"DEBUG"``).
        format_string: Optional log record format; a default including
            timestamp, logger name, and level is used when omitted.

    Returns:
        The configured ``logging.Logger`` instance.

    Raises:
        ValueError: If ``level`` is not a recognized logging level name.
    """
    if format_string is None:
        format_string = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"

    # Fail fast with a clear error instead of the opaque AttributeError that
    # a bare getattr(logging, level.upper()) raises for an unknown level name.
    numeric_level = getattr(logging, level.upper(), None)
    if not isinstance(numeric_level, int):
        raise ValueError(f"Unknown logging level: {level!r}")

    logger = logging.getLogger(name)
    logger.setLevel(numeric_level)

    # Attach a handler only once so repeated setup calls don't duplicate
    # output lines. NOTE: an already-attached handler keeps its original
    # formatter; only the level is updated on subsequent calls.
    if not logger.handlers:
        handler = logging.StreamHandler(sys.stdout)
        handler.setFormatter(logging.Formatter(format_string))
        logger.addHandler(handler)
    return logger
def get_logger(name: str) -> logging.Logger:
    """Fetch (or lazily create) the logger registered under *name*.

    Thin convenience wrapper around ``logging.getLogger`` so callers only
    need to import this module for logger access.
    """
    logger = logging.getLogger(name)
    return logger

View File

@@ -825,6 +825,88 @@ files = [
{file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
] ]
[[package]]
name = "numpy"
version = "2.4.2"
description = "Fundamental package for array computing in Python"
optional = false
python-versions = ">=3.11"
groups = ["main"]
files = [
{file = "numpy-2.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7e88598032542bd49af7c4747541422884219056c268823ef6e5e89851c8825"},
{file = "numpy-2.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7edc794af8b36ca37ef5fcb5e0d128c7e0595c7b96a2318d1badb6fcd8ee86b1"},
{file = "numpy-2.4.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:6e9f61981ace1360e42737e2bae58b27bf28a1b27e781721047d84bd754d32e7"},
{file = "numpy-2.4.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:cb7bbb88aa74908950d979eeaa24dbdf1a865e3c7e45ff0121d8f70387b55f73"},
{file = "numpy-2.4.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f069069931240b3fc703f1e23df63443dbd6390614c8c44a87d96cd0ec81eb1"},
{file = "numpy-2.4.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c02ef4401a506fb60b411467ad501e1429a3487abca4664871d9ae0b46c8ba32"},
{file = "numpy-2.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2653de5c24910e49c2b106499803124dde62a5a1fe0eedeaecf4309a5f639390"},
{file = "numpy-2.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1ae241bbfc6ae276f94a170b14785e561cb5e7f626b6688cf076af4110887413"},
{file = "numpy-2.4.2-cp311-cp311-win32.whl", hash = "sha256:df1b10187212b198dd45fa943d8985a3c8cf854aed4923796e0e019e113a1bda"},
{file = "numpy-2.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:b9c618d56a29c9cb1c4da979e9899be7578d2e0b3c24d52079c166324c9e8695"},
{file = "numpy-2.4.2-cp311-cp311-win_arm64.whl", hash = "sha256:47c5a6ed21d9452b10227e5e8a0e1c22979811cad7dcc19d8e3e2fb8fa03f1a3"},
{file = "numpy-2.4.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:21982668592194c609de53ba4933a7471880ccbaadcc52352694a59ecc860b3a"},
{file = "numpy-2.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40397bda92382fcec844066efb11f13e1c9a3e2a8e8f318fb72ed8b6db9f60f1"},
{file = "numpy-2.4.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:b3a24467af63c67829bfaa61eecf18d5432d4f11992688537be59ecd6ad32f5e"},
{file = "numpy-2.4.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:805cc8de9fd6e7a22da5aed858e0ab16be5a4db6c873dde1d7451c541553aa27"},
{file = "numpy-2.4.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d82351358ffbcdcd7b686b90742a9b86632d6c1c051016484fa0b326a0a1548"},
{file = "numpy-2.4.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e35d3e0144137d9fdae62912e869136164534d64a169f86438bc9561b6ad49f"},
{file = "numpy-2.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:adb6ed2ad29b9e15321d167d152ee909ec73395901b70936f029c3bc6d7f4460"},
{file = "numpy-2.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8906e71fd8afcb76580404e2a950caef2685df3d2a57fe82a86ac8d33cc007ba"},
{file = "numpy-2.4.2-cp312-cp312-win32.whl", hash = "sha256:ec055f6dae239a6299cace477b479cca2fc125c5675482daf1dd886933a1076f"},
{file = "numpy-2.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:209fae046e62d0ce6435fcfe3b1a10537e858249b3d9b05829e2a05218296a85"},
{file = "numpy-2.4.2-cp312-cp312-win_arm64.whl", hash = "sha256:fbde1b0c6e81d56f5dccd95dd4a711d9b95df1ae4009a60887e56b27e8d903fa"},
{file = "numpy-2.4.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:25f2059807faea4b077a2b6837391b5d830864b3543627f381821c646f31a63c"},
{file = "numpy-2.4.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bd3a7a9f5847d2fb8c2c6d1c862fa109c31a9abeca1a3c2bd5a64572955b2979"},
{file = "numpy-2.4.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:8e4549f8a3c6d13d55041925e912bfd834285ef1dd64d6bc7d542583355e2e98"},
{file = "numpy-2.4.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:aea4f66ff44dfddf8c2cffd66ba6538c5ec67d389285292fe428cb2c738c8aef"},
{file = "numpy-2.4.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3cd545784805de05aafe1dde61752ea49a359ccba9760c1e5d1c88a93bbf2b7"},
{file = "numpy-2.4.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0d9b7c93578baafcbc5f0b83eaf17b79d345c6f36917ba0c67f45226911d499"},
{file = "numpy-2.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f74f0f7779cc7ae07d1810aab8ac6b1464c3eafb9e283a40da7309d5e6e48fbb"},
{file = "numpy-2.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c7ac672d699bf36275c035e16b65539931347d68b70667d28984c9fb34e07fa7"},
{file = "numpy-2.4.2-cp313-cp313-win32.whl", hash = "sha256:8e9afaeb0beff068b4d9cd20d322ba0ee1cecfb0b08db145e4ab4dd44a6b5110"},
{file = "numpy-2.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:7df2de1e4fba69a51c06c28f5a3de36731eb9639feb8e1cf7e4a7b0daf4cf622"},
{file = "numpy-2.4.2-cp313-cp313-win_arm64.whl", hash = "sha256:0fece1d1f0a89c16b03442eae5c56dc0be0c7883b5d388e0c03f53019a4bfd71"},
{file = "numpy-2.4.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5633c0da313330fd20c484c78cdd3f9b175b55e1a766c4a174230c6b70ad8262"},
{file = "numpy-2.4.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d9f64d786b3b1dd742c946c42d15b07497ed14af1a1f3ce840cce27daa0ce913"},
{file = "numpy-2.4.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:b21041e8cb6a1eb5312dd1d2f80a94d91efffb7a06b70597d44f1bd2dfc315ab"},
{file = "numpy-2.4.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:00ab83c56211a1d7c07c25e3217ea6695e50a3e2f255053686b081dc0b091a82"},
{file = "numpy-2.4.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fb882da679409066b4603579619341c6d6898fc83a8995199d5249f986e8e8f"},
{file = "numpy-2.4.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:66cb9422236317f9d44b67b4d18f44efe6e9c7f8794ac0462978513359461554"},
{file = "numpy-2.4.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0f01dcf33e73d80bd8dc0f20a71303abbafa26a19e23f6b68d1aa9990af90257"},
{file = "numpy-2.4.2-cp313-cp313t-win32.whl", hash = "sha256:52b913ec40ff7ae845687b0b34d8d93b60cb66dcee06996dd5c99f2fc9328657"},
{file = "numpy-2.4.2-cp313-cp313t-win_amd64.whl", hash = "sha256:5eea80d908b2c1f91486eb95b3fb6fab187e569ec9752ab7d9333d2e66bf2d6b"},
{file = "numpy-2.4.2-cp313-cp313t-win_arm64.whl", hash = "sha256:fd49860271d52127d61197bb50b64f58454e9f578cb4b2c001a6de8b1f50b0b1"},
{file = "numpy-2.4.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:444be170853f1f9d528428eceb55f12918e4fda5d8805480f36a002f1415e09b"},
{file = "numpy-2.4.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:d1240d50adff70c2a88217698ca844723068533f3f5c5fa6ee2e3220e3bdb000"},
{file = "numpy-2.4.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:7cdde6de52fb6664b00b056341265441192d1291c130e99183ec0d4b110ff8b1"},
{file = "numpy-2.4.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:cda077c2e5b780200b6b3e09d0b42205a3d1c68f30c6dceb90401c13bff8fe74"},
{file = "numpy-2.4.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d30291931c915b2ab5717c2974bb95ee891a1cf22ebc16a8006bd59cd210d40a"},
{file = "numpy-2.4.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bba37bc29d4d85761deed3954a1bc62be7cf462b9510b51d367b769a8c8df325"},
{file = "numpy-2.4.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b2f0073ed0868db1dcd86e052d37279eef185b9c8db5bf61f30f46adac63c909"},
{file = "numpy-2.4.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7f54844851cdb630ceb623dcec4db3240d1ac13d4990532446761baede94996a"},
{file = "numpy-2.4.2-cp314-cp314-win32.whl", hash = "sha256:12e26134a0331d8dbd9351620f037ec470b7c75929cb8a1537f6bfe411152a1a"},
{file = "numpy-2.4.2-cp314-cp314-win_amd64.whl", hash = "sha256:068cdb2d0d644cdb45670810894f6a0600797a69c05f1ac478e8d31670b8ee75"},
{file = "numpy-2.4.2-cp314-cp314-win_arm64.whl", hash = "sha256:6ed0be1ee58eef41231a5c943d7d1375f093142702d5723ca2eb07db9b934b05"},
{file = "numpy-2.4.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:98f16a80e917003a12c0580f97b5f875853ebc33e2eaa4bccfc8201ac6869308"},
{file = "numpy-2.4.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:20abd069b9cda45874498b245c8015b18ace6de8546bf50dfa8cea1696ed06ef"},
{file = "numpy-2.4.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:e98c97502435b53741540a5717a6749ac2ada901056c7db951d33e11c885cc7d"},
{file = "numpy-2.4.2-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:da6cad4e82cb893db4b69105c604d805e0c3ce11501a55b5e9f9083b47d2ffe8"},
{file = "numpy-2.4.2-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e4424677ce4b47fe73c8b5556d876571f7c6945d264201180db2dc34f676ab5"},
{file = "numpy-2.4.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2b8f157c8a6f20eb657e240f8985cc135598b2b46985c5bccbde7616dc9c6b1e"},
{file = "numpy-2.4.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5daf6f3914a733336dab21a05cdec343144600e964d2fcdabaac0c0269874b2a"},
{file = "numpy-2.4.2-cp314-cp314t-win32.whl", hash = "sha256:8c50dd1fc8826f5b26a5ee4d77ca55d88a895f4e4819c7ecc2a9f5905047a443"},
{file = "numpy-2.4.2-cp314-cp314t-win_amd64.whl", hash = "sha256:fcf92bee92742edd401ba41135185866f7026c502617f422eb432cfeca4fe236"},
{file = "numpy-2.4.2-cp314-cp314t-win_arm64.whl", hash = "sha256:1f92f53998a17265194018d1cc321b2e96e900ca52d54c7c77837b71b9465181"},
{file = "numpy-2.4.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:89f7268c009bc492f506abd6f5265defa7cb3f7487dc21d357c3d290add45082"},
{file = "numpy-2.4.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:e6dee3bb76aa4009d5a912180bf5b2de012532998d094acee25d9cb8dee3e44a"},
{file = "numpy-2.4.2-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:cd2bd2bbed13e213d6b55dc1d035a4f91748a7d3edc9480c13898b0353708920"},
{file = "numpy-2.4.2-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:cf28c0c1d4c4bf00f509fa7eb02c58d7caf221b50b467bcb0d9bbf1584d5c821"},
{file = "numpy-2.4.2-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e04ae107ac591763a47398bb45b568fc38f02dbc4aa44c063f67a131f99346cb"},
{file = "numpy-2.4.2-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:602f65afdef699cda27ec0b9224ae5dc43e328f4c24c689deaf77133dbee74d0"},
{file = "numpy-2.4.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:be71bf1edb48ebbbf7f6337b5bfd2f895d1902f6335a5830b20141fc126ffba0"},
{file = "numpy-2.4.2.tar.gz", hash = "sha256:659a6107e31a83c4e33f763942275fd278b21d095094044eb35569e86a21ddae"},
]
[[package]] [[package]]
name = "orjson" name = "orjson"
version = "3.11.7" version = "3.11.7"
@@ -1998,4 +2080,4 @@ dev = ["pytest", "setuptools"]
[metadata] [metadata]
lock-version = "2.1" lock-version = "2.1"
python-versions = "^3.13" python-versions = "^3.13"
content-hash = "5860440ac2d60a75338e2ae25a9c9165eb4a640e0a02dc6887e9724365c31f0c" content-hash = "fbcb4ceaf76eb000784572f2a7f7a3d368400290dbcd09bbe4c4467a1549b476"

View File

@@ -25,6 +25,7 @@ prometheus-client = "^0.19.0"
aitbc-crypto = {path = "../../packages/py/aitbc-crypto"} aitbc-crypto = {path = "../../packages/py/aitbc-crypto"}
asyncpg = ">=0.29.0" asyncpg = ">=0.29.0"
aitbc-core = {path = "../../packages/py/aitbc-core"} aitbc-core = {path = "../../packages/py/aitbc-core"}
numpy = "^2.4.2"
[tool.poetry.group.dev.dependencies] [tool.poetry.group.dev.dependencies]
pytest = "^8.2.0" pytest = "^8.2.0"

View File

@@ -5,7 +5,7 @@ Provides environment-based adapter selection and consolidated settings.
""" """
import os import os
from pydantic import field_validator from pydantic import Field, field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict from pydantic_settings import BaseSettings, SettingsConfigDict
from typing import List, Optional from typing import List, Optional
from pathlib import Path from pathlib import Path

View File

@@ -111,7 +111,7 @@ class CrossChainMapping(SQLModel, table=True):
wallet_type: str = Field(default="agent-wallet") # agent-wallet, external-wallet, etc. wallet_type: str = Field(default="agent-wallet") # agent-wallet, external-wallet, etc.
# Chain-specific metadata # Chain-specific metadata
chain_metadata: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON)) chain_meta_data: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON))
nonce: Optional[int] = Field(default=None) nonce: Optional[int] = Field(default=None)
# Activity tracking # Activity tracking
@@ -156,7 +156,7 @@ class IdentityVerification(SQLModel, table=True):
renewed_at: Optional[datetime] = Field(default=None) renewed_at: Optional[datetime] = Field(default=None)
# Metadata # Metadata
verification_metadata: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON)) verification_meta_data: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON))
# Timestamps # Timestamps
created_at: datetime = Field(default_factory=datetime.utcnow) created_at: datetime = Field(default_factory=datetime.utcnow)
@@ -226,7 +226,7 @@ class AgentIdentityCreate(SQLModel):
avatar_url: str = Field(default="") avatar_url: str = Field(default="")
supported_chains: List[int] = Field(default_factory=list) supported_chains: List[int] = Field(default_factory=list)
primary_chain: int = Field(default=1) primary_chain: int = Field(default=1)
metadata: Dict[str, Any] = Field(default_factory=dict) meta_data: Dict[str, Any] = Field(default_factory=dict)
tags: List[str] = Field(default_factory=list) tags: List[str] = Field(default_factory=list)
@@ -239,7 +239,7 @@ class AgentIdentityUpdate(SQLModel):
verification_level: Optional[VerificationType] = Field(default=None) verification_level: Optional[VerificationType] = Field(default=None)
supported_chains: Optional[List[int]] = Field(default=None) supported_chains: Optional[List[int]] = Field(default=None)
primary_chain: Optional[int] = Field(default=None) primary_chain: Optional[int] = Field(default=None)
metadata: Optional[Dict[str, Any]] = Field(default=None) meta_data: Optional[Dict[str, Any]] = Field(default=None)
settings: Optional[Dict[str, Any]] = Field(default=None) settings: Optional[Dict[str, Any]] = Field(default=None)
tags: Optional[List[str]] = Field(default=None) tags: Optional[List[str]] = Field(default=None)
@@ -252,7 +252,7 @@ class CrossChainMappingCreate(SQLModel):
chain_address: str chain_address: str
wallet_address: Optional[str] = Field(default=None) wallet_address: Optional[str] = Field(default=None)
wallet_type: str = Field(default="agent-wallet") wallet_type: str = Field(default="agent-wallet")
chain_metadata: Dict[str, Any] = Field(default_factory=dict) chain_meta_data: Dict[str, Any] = Field(default_factory=dict)
class CrossChainMappingUpdate(SQLModel): class CrossChainMappingUpdate(SQLModel):
@@ -260,7 +260,7 @@ class CrossChainMappingUpdate(SQLModel):
chain_address: Optional[str] = Field(default=None) chain_address: Optional[str] = Field(default=None)
wallet_address: Optional[str] = Field(default=None) wallet_address: Optional[str] = Field(default=None)
wallet_type: Optional[str] = Field(default=None) wallet_type: Optional[str] = Field(default=None)
chain_metadata: Optional[Dict[str, Any]] = Field(default=None) chain_meta_data: Optional[Dict[str, Any]] = Field(default=None)
is_verified: Optional[bool] = Field(default=None) is_verified: Optional[bool] = Field(default=None)
@@ -273,7 +273,7 @@ class IdentityVerificationCreate(SQLModel):
proof_hash: str proof_hash: str
proof_data: Dict[str, Any] = Field(default_factory=dict) proof_data: Dict[str, Any] = Field(default_factory=dict)
expires_at: Optional[datetime] = Field(default=None) expires_at: Optional[datetime] = Field(default=None)
verification_metadata: Dict[str, Any] = Field(default_factory=dict) verification_meta_data: Dict[str, Any] = Field(default_factory=dict)
class AgentWalletCreate(SQLModel): class AgentWalletCreate(SQLModel):
@@ -320,7 +320,7 @@ class AgentIdentityResponse(SQLModel):
total_transactions: int total_transactions: int
successful_transactions: int successful_transactions: int
last_activity: Optional[datetime] last_activity: Optional[datetime]
metadata: Dict[str, Any] meta_data: Dict[str, Any]
tags: List[str] tags: List[str]
created_at: datetime created_at: datetime
updated_at: datetime updated_at: datetime
@@ -337,7 +337,7 @@ class CrossChainMappingResponse(SQLModel):
verified_at: Optional[datetime] verified_at: Optional[datetime]
wallet_address: Optional[str] wallet_address: Optional[str]
wallet_type: str wallet_type: str
chain_metadata: Dict[str, Any] chain_meta_data: Dict[str, Any]
last_transaction: Optional[datetime] last_transaction: Optional[datetime]
transaction_count: int transaction_count: int
created_at: datetime created_at: datetime

View File

@@ -104,7 +104,7 @@ class AgentPerformanceProfile(SQLModel, table=True):
last_assessed: Optional[datetime] = None last_assessed: Optional[datetime] = None
# Additional data # Additional data
profile_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) profile_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
performance_notes: str = Field(default="", max_length=1000) performance_notes: str = Field(default="", max_length=1000)
@@ -155,7 +155,7 @@ class MetaLearningModel(SQLModel, table=True):
deployed_at: Optional[datetime] = None deployed_at: Optional[datetime] = None
# Additional data # Additional data
model_profile_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) model_profile_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
training_logs: List[Dict[str, Any]] = Field(default=[], sa_column=Column(JSON)) training_logs: List[Dict[str, Any]] = Field(default=[], sa_column=Column(JSON))
@@ -206,7 +206,7 @@ class ResourceAllocation(SQLModel, table=True):
updated_at: datetime = Field(default_factory=datetime.utcnow()) updated_at: datetime = Field(default_factory=datetime.utcnow())
# Additional data # Additional data
allocation_profile_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) allocation_profile_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
resource_utilization: Dict[str, float] = Field(default={}, sa_column=Column(JSON)) resource_utilization: Dict[str, float] = Field(default={}, sa_column=Column(JSON))
@@ -261,7 +261,7 @@ class PerformanceOptimization(SQLModel, table=True):
completed_at: Optional[datetime] = None completed_at: Optional[datetime] = None
# Additional data # Additional data
optimization_profile_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) optimization_profile_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
performance_logs: List[Dict[str, Any]] = Field(default=[], sa_column=Column(JSON)) performance_logs: List[Dict[str, Any]] = Field(default=[], sa_column=Column(JSON))
@@ -315,7 +315,7 @@ class AgentCapability(SQLModel, table=True):
updated_at: datetime = Field(default_factory=datetime.utcnow) updated_at: datetime = Field(default_factory=datetime.utcnow)
# Additional data # Additional data
capability_profile_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) capability_profile_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
training_history: List[Dict[str, Any]] = Field(default=[], sa_column=Column(JSON)) training_history: List[Dict[str, Any]] = Field(default=[], sa_column=Column(JSON))
@@ -368,7 +368,7 @@ class FusionModel(SQLModel, table=True):
deployed_at: Optional[datetime] = None deployed_at: Optional[datetime] = None
# Additional data # Additional data
fusion_profile_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) fusion_profile_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
training_logs: List[Dict[str, Any]] = Field(default=[], sa_column=Column(JSON)) training_logs: List[Dict[str, Any]] = Field(default=[], sa_column=Column(JSON))
@@ -423,7 +423,7 @@ class ReinforcementLearningConfig(SQLModel, table=True):
deployed_at: Optional[datetime] = None deployed_at: Optional[datetime] = None
# Additional data # Additional data
rl_profile_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) rl_profile_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
training_logs: List[Dict[str, Any]] = Field(default=[], sa_column=Column(JSON)) training_logs: List[Dict[str, Any]] = Field(default=[], sa_column=Column(JSON))
@@ -477,5 +477,5 @@ class CreativeCapability(SQLModel, table=True):
updated_at: datetime = Field(default_factory=datetime.utcnow) updated_at: datetime = Field(default_factory=datetime.utcnow)
# Additional data # Additional data
creative_profile_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) creative_profile_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
portfolio_samples: List[Dict[str, Any]] = Field(default=[], sa_column=Column(JSON)) portfolio_samples: List[Dict[str, Any]] = Field(default=[], sa_column=Column(JSON))

View File

@@ -193,7 +193,7 @@ class PortfolioAlert(SQLModel, table=True):
alert_type: str = Field(index=True) # Type of alert alert_type: str = Field(index=True) # Type of alert
severity: str = Field(index=True) # Severity level severity: str = Field(index=True) # Severity level
message: str = Field(default="") message: str = Field(default="")
metadata: Dict[str, str] = Field(default_factory=dict, sa_column=Column(JSON)) meta_data: Dict[str, str] = Field(default_factory=dict, sa_column=Column(JSON))
is_acknowledged: bool = Field(default=False, index=True) is_acknowledged: bool = Field(default=False, index=True)
acknowledged_at: Optional[datetime] = Field(default=None) acknowledged_at: Optional[datetime] = Field(default=None)
created_at: datetime = Field(default_factory=datetime.utcnow, index=True) created_at: datetime = Field(default_factory=datetime.utcnow, index=True)
@@ -213,7 +213,7 @@ class StrategySignal(SQLModel, table=True):
stop_loss: float = Field(default=0.0) # Stop loss price stop_loss: float = Field(default=0.0) # Stop loss price
time_horizon: str = Field(default="1d") # Time horizon time_horizon: str = Field(default="1d") # Time horizon
reasoning: str = Field(default="") # Signal reasoning reasoning: str = Field(default="") # Signal reasoning
metadata: Dict[str, str] = Field(default_factory=dict, sa_column=Column(JSON)) meta_data: Dict[str, str] = Field(default_factory=dict, sa_column=Column(JSON))
is_executed: bool = Field(default=False, index=True) is_executed: bool = Field(default=False, index=True)
executed_at: Optional[datetime] = Field(default=None) executed_at: Optional[datetime] = Field(default=None)
expires_at: datetime = Field(default_factory=lambda: datetime.utcnow() + timedelta(hours=24)) expires_at: datetime = Field(default_factory=lambda: datetime.utcnow() + timedelta(hours=24))

View File

@@ -267,7 +267,7 @@ class PoolAlert(SQLModel, table=True):
severity: str = Field(index=True) # LOW, MEDIUM, HIGH, CRITICAL severity: str = Field(index=True) # LOW, MEDIUM, HIGH, CRITICAL
title: str = Field(default="") title: str = Field(default="")
message: str = Field(default="") message: str = Field(default="")
metadata: Dict[str, str] = Field(default_factory=dict, sa_column=Column(JSON)) meta_data: Dict[str, str] = Field(default_factory=dict, sa_column=Column(JSON))
threshold_value: float = Field(default=0.0) # Threshold that triggered alert threshold_value: float = Field(default=0.0) # Threshold that triggered alert
current_value: float = Field(default=0.0) # Current value current_value: float = Field(default=0.0) # Current value
is_acknowledged: bool = Field(default=False, index=True) is_acknowledged: bool = Field(default=False, index=True)

View File

@@ -81,7 +81,7 @@ class MarketMetric(SQLModel, table=True):
trade_type: Optional[str] = None trade_type: Optional[str] = None
# Metadata # Metadata
metric_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) metric_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
# Timestamps # Timestamps
recorded_at: datetime = Field(default_factory=datetime.utcnow) recorded_at: datetime = Field(default_factory=datetime.utcnow)
@@ -187,7 +187,7 @@ class AnalyticsReport(SQLModel, table=True):
delivered_at: Optional[datetime] = None delivered_at: Optional[datetime] = None
# Additional data # Additional data
report_metric_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) report_metric_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
template_used: Optional[str] = None template_used: Optional[str] = None
@@ -282,7 +282,7 @@ class DataCollectionJob(SQLModel, table=True):
updated_at: datetime = Field(default_factory=datetime.utcnow) updated_at: datetime = Field(default_factory=datetime.utcnow)
# Additional data # Additional data
job_metric_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) job_metric_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
execution_log: List[Dict[str, Any]] = Field(default=[], sa_column=Column(JSON)) execution_log: List[Dict[str, Any]] = Field(default=[], sa_column=Column(JSON))
@@ -332,7 +332,7 @@ class AlertRule(SQLModel, table=True):
updated_at: datetime = Field(default_factory=datetime.utcnow) updated_at: datetime = Field(default_factory=datetime.utcnow)
# Additional data # Additional data
rule_metric_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) rule_metric_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
test_results: List[Dict[str, Any]] = Field(default=[], sa_column=Column(JSON)) test_results: List[Dict[str, Any]] = Field(default=[], sa_column=Column(JSON))

View File

@@ -234,7 +234,7 @@ class AgentMetrics(SQLModel, table=True):
monthly_earnings: List[float] = Field(default_factory=list, sa_column=Column(JSON)) monthly_earnings: List[float] = Field(default_factory=list, sa_column=Column(JSON))
# Metadata # Metadata
agent_metadata: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON)) agent_meta_data: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON))
# Relationships # Relationships
stakes: List[AgentStake] = Relationship(back_populates="agent_metrics") stakes: List[AgentStake] = Relationship(back_populates="agent_metrics")
@@ -278,7 +278,7 @@ class StakingPool(SQLModel, table=True):
volatility_score: float = Field(default=0.0) volatility_score: float = Field(default=0.0)
# Metadata # Metadata
pool_metadata: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON)) pool_meta_data: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON))
# Indexes # Indexes
__table_args__ = ( __table_args__ = (
@@ -366,7 +366,7 @@ class BountyStats(SQLModel, table=True):
tier_distribution: Dict[str, int] = Field(default_factory=dict, sa_column=Column(JSON)) tier_distribution: Dict[str, int] = Field(default_factory=dict, sa_column=Column(JSON))
# Metadata # Metadata
stats_metadata: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON)) stats_meta_data: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON))
# Indexes # Indexes
__table_args__ = ( __table_args__ = (

View File

@@ -100,7 +100,7 @@ class AgentCertification(SQLModel, table=True):
last_verified_at: Optional[datetime] = None last_verified_at: Optional[datetime] = None
# Additional data # Additional data
cert_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) cert_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
notes: str = Field(default="", max_length=1000) notes: str = Field(default="", max_length=1000)
@@ -144,7 +144,7 @@ class CertificationRequirement(SQLModel, table=True):
expiry_date: Optional[datetime] = None expiry_date: Optional[datetime] = None
# Additional data # Additional data
cert_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) cert_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
class VerificationRecord(SQLModel, table=True): class VerificationRecord(SQLModel, table=True):
@@ -194,7 +194,7 @@ class VerificationRecord(SQLModel, table=True):
audit_trail: List[Dict[str, Any]] = Field(default=[], sa_column=Column(JSON)) audit_trail: List[Dict[str, Any]] = Field(default=[], sa_column=Column(JSON))
# Additional data # Additional data
cert_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) cert_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
notes: str = Field(default="", max_length=1000) notes: str = Field(default="", max_length=1000)
@@ -244,7 +244,7 @@ class PartnershipProgram(SQLModel, table=True):
expires_at: Optional[datetime] = None expires_at: Optional[datetime] = None
# Additional data # Additional data
program_cert_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) program_cert_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
contact_info: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) contact_info: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
@@ -295,7 +295,7 @@ class AgentPartnership(SQLModel, table=True):
last_activity: Optional[datetime] = None last_activity: Optional[datetime] = None
# Additional data # Additional data
partnership_cert_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) partnership_cert_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
notes: str = Field(default="", max_length=1000) notes: str = Field(default="", max_length=1000)
@@ -341,7 +341,7 @@ class AchievementBadge(SQLModel, table=True):
available_until: Optional[datetime] = None available_until: Optional[datetime] = None
# Additional data # Additional data
badge_cert_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) badge_cert_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
requirements_text: str = Field(default="", max_length=1000) requirements_text: str = Field(default="", max_length=1000)
@@ -392,7 +392,7 @@ class AgentBadge(SQLModel, table=True):
last_viewed_at: Optional[datetime] = None last_viewed_at: Optional[datetime] = None
# Additional data # Additional data
badge_cert_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) badge_cert_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
notes: str = Field(default="", max_length=1000) notes: str = Field(default="", max_length=1000)
@@ -449,5 +449,5 @@ class CertificationAudit(SQLModel, table=True):
updated_at: datetime = Field(default_factory=datetime.utcnow) updated_at: datetime = Field(default_factory=datetime.utcnow)
# Additional data # Additional data
audit_cert_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) audit_cert_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
notes: str = Field(default="", max_length=2000) notes: str = Field(default="", max_length=2000)

View File

@@ -80,7 +80,7 @@ class AgentSolution(SQLModel, table=True):
average_rating: float = Field(default=0.0) average_rating: float = Field(default=0.0)
review_count: int = Field(default=0) review_count: int = Field(default=0)
solution_metadata: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON)) solution_meta_data: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON))
created_at: datetime = Field(default_factory=datetime.utcnow) created_at: datetime = Field(default_factory=datetime.utcnow)
updated_at: datetime = Field(default_factory=datetime.utcnow) updated_at: datetime = Field(default_factory=datetime.utcnow)

View File

@@ -159,7 +159,7 @@ class Validator(SQLModel, table=True):
status: ValidatorStatus = Field(default=ValidatorStatus.ACTIVE, index=True) status: ValidatorStatus = Field(default=ValidatorStatus.ACTIVE, index=True)
is_active: bool = Field(default=True, index=True) is_active: bool = Field(default=True, index=True)
supported_chains: List[int] = Field(default_factory=list, sa_column=Column(JSON)) supported_chains: List[int] = Field(default_factory=list, sa_column=Column(JSON))
metadata: Dict[str, str] = Field(default_factory=dict, sa_column=Column(JSON)) val_meta_data: Dict[str, str] = Field(default_factory=dict, sa_column=Column(JSON))
created_at: datetime = Field(default_factory=datetime.utcnow) created_at: datetime = Field(default_factory=datetime.utcnow)
updated_at: datetime = Field(default_factory=datetime.utcnow) updated_at: datetime = Field(default_factory=datetime.utcnow)
@@ -274,7 +274,7 @@ class BridgeAlert(SQLModel, table=True):
bridge_request_id: Optional[int] = Field(default=None, index=True) bridge_request_id: Optional[int] = Field(default=None, index=True)
title: str = Field(default="") title: str = Field(default="")
message: str = Field(default="") message: str = Field(default="")
metadata: Dict[str, str] = Field(default_factory=dict, sa_column=Column(JSON)) val_meta_data: Dict[str, str] = Field(default_factory=dict, sa_column=Column(JSON))
threshold_value: float = Field(default=0.0) # Threshold that triggered alert threshold_value: float = Field(default=0.0) # Threshold that triggered alert
current_value: float = Field(default=0.0) # Current value current_value: float = Field(default=0.0) # Current value
is_acknowledged: bool = Field(default=False, index=True) is_acknowledged: bool = Field(default=False, index=True)

View File

@@ -210,7 +210,7 @@ class CrossChainReputationResponse(SQLModel):
consistency_score: float consistency_score: float
verification_status: str verification_status: str
last_updated: datetime last_updated: datetime
metadata: Dict[str, Any] = Field(default_factory=dict) meta_data: Dict[str, Any] = Field(default_factory=dict)
class CrossChainAnalyticsResponse(SQLModel): class CrossChainAnalyticsResponse(SQLModel):
@@ -241,7 +241,7 @@ class ReputationAnomalyResponse(SQLModel):
current_score: float current_score: float
score_change: float score_change: float
confidence: float confidence: float
metadata: Dict[str, Any] = Field(default_factory=dict) meta_data: Dict[str, Any] = Field(default_factory=dict)
class CrossChainLeaderboardResponse(SQLModel): class CrossChainLeaderboardResponse(SQLModel):

View File

@@ -46,7 +46,7 @@ class AgentMemoryNode(SQLModel, table=True):
status: StorageStatus = Field(default=StorageStatus.PENDING, index=True) status: StorageStatus = Field(default=StorageStatus.PENDING, index=True)
metadata: Dict[str, str] = Field(default_factory=dict, sa_column=Column(JSON)) meta_data: Dict[str, str] = Field(default_factory=dict, sa_column=Column(JSON))
tags: List[str] = Field(default_factory=list, sa_column=Column(JSON)) tags: List[str] = Field(default_factory=list, sa_column=Column(JSON))
# Blockchain Anchoring # Blockchain Anchoring

View File

@@ -17,7 +17,7 @@ class Miner(SQLModel, table=True):
concurrency: int = Field(default=1) concurrency: int = Field(default=1)
status: str = Field(default="ONLINE", index=True) status: str = Field(default="ONLINE", index=True)
inflight: int = Field(default=0) inflight: int = Field(default=0)
extra_metadata: dict = Field(default_factory=dict, sa_column=Column(JSON, nullable=False)) extra_meta_data: dict = Field(default_factory=dict, sa_column=Column(JSON, nullable=False))
last_heartbeat: datetime = Field(default_factory=datetime.utcnow, index=True) last_heartbeat: datetime = Field(default_factory=datetime.utcnow, index=True)
session_token: Optional[str] = None session_token: Optional[str] = None
last_job_at: Optional[datetime] = Field(default=None, index=True) last_job_at: Optional[datetime] = Field(default=None, index=True)

View File

@@ -527,7 +527,7 @@ class PricingAuditLog(SQLModel, table=True):
created_at: datetime = Field(default_factory=datetime.utcnow) created_at: datetime = Field(default_factory=datetime.utcnow)
# Additional metadata # Additional metadata
metadata: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON)) meta_data: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON))
tags: List[str] = Field(default_factory=list, sa_column=Column(JSON)) tags: List[str] = Field(default_factory=list, sa_column=Column(JSON))

View File

@@ -105,7 +105,7 @@ class TradeRequest(SQLModel, table=True):
# Additional metadata # Additional metadata
tags: List[str] = Field(default=[], sa_column=Column(JSON)) tags: List[str] = Field(default=[], sa_column=Column(JSON))
trading_metadata: Dict[str, Any] = Field(default={}, sa_column=Column(JSON)) trading_meta_data: Dict[str, Any] = Field(default={}, sa_column=Column(JSON))
class TradeMatch(SQLModel, table=True): class TradeMatch(SQLModel, table=True):

View File

@@ -38,7 +38,7 @@ class AgentWallet(SQLModel, table=True):
is_active: bool = Field(default=True) is_active: bool = Field(default=True)
encrypted_private_key: Optional[str] = Field(default=None) # Only if managed internally encrypted_private_key: Optional[str] = Field(default=None) # Only if managed internally
kms_key_id: Optional[str] = Field(default=None) # Reference to external KMS kms_key_id: Optional[str] = Field(default=None) # Reference to external KMS
metadata: Dict[str, str] = Field(default_factory=dict, sa_column=Column(JSON)) meta_data: Dict[str, str] = Field(default_factory=dict, sa_column=Column(JSON))
created_at: datetime = Field(default_factory=datetime.utcnow) created_at: datetime = Field(default_factory=datetime.utcnow)
updated_at: datetime = Field(default_factory=datetime.utcnow) updated_at: datetime = Field(default_factory=datetime.utcnow)

View File

@@ -17,8 +17,8 @@ from ..domain.agent_identity import (
AgentWalletUpdate, AgentIdentityResponse, CrossChainMappingResponse, AgentWalletUpdate, AgentIdentityResponse, CrossChainMappingResponse,
AgentWalletResponse AgentWalletResponse
) )
from ..services.database import get_session from ..storage.db import get_session
from .manager import AgentIdentityManager from ..agent_identity.manager import AgentIdentityManager
router = APIRouter(prefix="/agent-identity", tags=["Agent Identity"]) router = APIRouter(prefix="/agent-identity", tags=["Agent Identity"])

View File

@@ -11,7 +11,7 @@ from fastapi import APIRouter, HTTPException, Depends, Query, BackgroundTasks
from fastapi.responses import JSONResponse from fastapi.responses import JSONResponse
from sqlmodel import Session, select, func, Field from sqlmodel import Session, select, func, Field
from ..services.database import get_session from ..storage.db import get_session
from ..agent_identity.wallet_adapter_enhanced import ( from ..agent_identity.wallet_adapter_enhanced import (
EnhancedWalletAdapter, WalletAdapterFactory, SecurityLevel, EnhancedWalletAdapter, WalletAdapterFactory, SecurityLevel,
WalletStatus, TransactionStatus WalletStatus, TransactionStatus

View File

@@ -11,7 +11,7 @@ from fastapi import APIRouter, HTTPException, Depends, Query, BackgroundTasks
from fastapi.responses import JSONResponse from fastapi.responses import JSONResponse
from sqlmodel import Session, select, func from sqlmodel import Session, select, func
from ..services.database import get_session from ..storage.db import get_session
from ..domain.developer_platform import ( from ..domain.developer_platform import (
DeveloperProfile, DeveloperCertification, RegionalHub, DeveloperProfile, DeveloperCertification, RegionalHub,
BountyTask, BountySubmission, BountyStatus, CertificationLevel BountyTask, BountySubmission, BountyStatus, CertificationLevel

View File

@@ -11,7 +11,7 @@ from fastapi import APIRouter, HTTPException, Depends, Query, BackgroundTasks
from fastapi.responses import JSONResponse from fastapi.responses import JSONResponse
from sqlmodel import Session, select, func, Field from sqlmodel import Session, select, func, Field
from ..services.database import get_session from ..storage.db import get_session
from ..domain.global_marketplace import ( from ..domain.global_marketplace import (
GlobalMarketplaceOffer, GlobalMarketplaceTransaction, GlobalMarketplaceAnalytics, GlobalMarketplaceOffer, GlobalMarketplaceTransaction, GlobalMarketplaceAnalytics,
MarketplaceRegion, GlobalMarketplaceConfig, RegionStatus, MarketplaceStatus MarketplaceRegion, GlobalMarketplaceConfig, RegionStatus, MarketplaceStatus

View File

@@ -11,7 +11,7 @@ from fastapi import APIRouter, HTTPException, Depends, Query, BackgroundTasks
from fastapi.responses import JSONResponse from fastapi.responses import JSONResponse
from sqlmodel import Session, select, func, Field from sqlmodel import Session, select, func, Field
from ..services.database import get_session from ..storage.db import get_session
from ..domain.global_marketplace import ( from ..domain.global_marketplace import (
GlobalMarketplaceOffer, GlobalMarketplaceTransaction, GlobalMarketplaceAnalytics, GlobalMarketplaceOffer, GlobalMarketplaceTransaction, GlobalMarketplaceAnalytics,
MarketplaceRegion, RegionStatus, MarketplaceStatus MarketplaceRegion, RegionStatus, MarketplaceStatus

View File

@@ -11,7 +11,7 @@ from fastapi import APIRouter, HTTPException, Depends, Query, BackgroundTasks
from fastapi.responses import JSONResponse from fastapi.responses import JSONResponse
from sqlmodel import Session, select, func from sqlmodel import Session, select, func
from ..services.database import get_session from ..storage.db import get_session
from ..domain.governance import ( from ..domain.governance import (
GovernanceProfile, Proposal, Vote, DaoTreasury, TransparencyReport, GovernanceProfile, Proposal, Vote, DaoTreasury, TransparencyReport,
ProposalStatus, VoteType, GovernanceRole ProposalStatus, VoteType, GovernanceRole

View File

@@ -19,7 +19,7 @@ from sqlalchemy.exc import SQLAlchemyError
from ..domain.cross_chain_bridge import ( from ..domain.cross_chain_bridge import (
BridgeRequestStatus, ChainType, TransactionType, ValidatorStatus, BridgeRequestStatus, ChainType, TransactionType, ValidatorStatus,
CrossChainBridgeRequest, BridgeValidator, BridgeLiquidityPool BridgeRequest, Validator
) )
from ..domain.agent_identity import AgentWallet, CrossChainMapping from ..domain.agent_identity import AgentWallet, CrossChainMapping
from ..agent_identity.wallet_adapter_enhanced import ( from ..agent_identity.wallet_adapter_enhanced import (
@@ -139,7 +139,7 @@ class CrossChainBridgeService:
protocol = protocol or BridgeProtocol(source_config["protocol"]) protocol = protocol or BridgeProtocol(source_config["protocol"])
# Create bridge request # Create bridge request
bridge_request = CrossChainBridgeRequest( bridge_request = BridgeRequest(
id=f"bridge_{uuid4().hex[:8]}", id=f"bridge_{uuid4().hex[:8]}",
user_address=user_address, user_address=user_address,
source_chain_id=source_chain_id, source_chain_id=source_chain_id,
@@ -191,8 +191,8 @@ class CrossChainBridgeService:
"""Get status of a bridge request""" """Get status of a bridge request"""
try: try:
stmt = select(CrossChainBridgeRequest).where( stmt = select(BridgeRequest).where(
CrossChainBridgeRequest.id == bridge_request_id BridgeRequest.id == bridge_request_id
) )
bridge_request = self.session.exec(stmt).first() bridge_request = self.session.exec(stmt).first()
@@ -263,8 +263,8 @@ class CrossChainBridgeService:
"""Cancel a bridge request""" """Cancel a bridge request"""
try: try:
stmt = select(CrossChainBridgeRequest).where( stmt = select(BridgeRequest).where(
CrossChainBridgeRequest.id == bridge_request_id BridgeRequest.id == bridge_request_id
) )
bridge_request = self.session.exec(stmt).first() bridge_request = self.session.exec(stmt).first()
@@ -307,32 +307,32 @@ class CrossChainBridgeService:
# Get total requests # Get total requests
total_requests = self.session.exec( total_requests = self.session.exec(
select(func.count(CrossChainBridgeRequest.id)).where( select(func.count(BridgeRequest.id)).where(
CrossChainBridgeRequest.created_at >= cutoff_time BridgeRequest.created_at >= cutoff_time
) )
).scalar() or 0 ).scalar() or 0
# Get completed requests # Get completed requests
completed_requests = self.session.exec( completed_requests = self.session.exec(
select(func.count(CrossChainBridgeRequest.id)).where( select(func.count(BridgeRequest.id)).where(
CrossChainBridgeRequest.created_at >= cutoff_time, BridgeRequest.created_at >= cutoff_time,
CrossChainBridgeRequest.status == BridgeRequestStatus.COMPLETED BridgeRequest.status == BridgeRequestStatus.COMPLETED
) )
).scalar() or 0 ).scalar() or 0
# Get total volume # Get total volume
total_volume = self.session.exec( total_volume = self.session.exec(
select(func.sum(CrossChainBridgeRequest.amount)).where( select(func.sum(BridgeRequest.amount)).where(
CrossChainBridgeRequest.created_at >= cutoff_time, BridgeRequest.created_at >= cutoff_time,
CrossChainBridgeRequest.status == BridgeRequestStatus.COMPLETED BridgeRequest.status == BridgeRequestStatus.COMPLETED
) )
).scalar() or 0 ).scalar() or 0
# Get total fees # Get total fees
total_fees = self.session.exec( total_fees = self.session.exec(
select(func.sum(CrossChainBridgeRequest.total_fee)).where( select(func.sum(BridgeRequest.total_fee)).where(
CrossChainBridgeRequest.created_at >= cutoff_time, BridgeRequest.created_at >= cutoff_time,
CrossChainBridgeRequest.status == BridgeRequestStatus.COMPLETED BridgeRequest.status == BridgeRequestStatus.COMPLETED
) )
).scalar() or 0 ).scalar() or 0
@@ -342,11 +342,11 @@ class CrossChainBridgeService:
# Get average processing time # Get average processing time
avg_processing_time = self.session.exec( avg_processing_time = self.session.exec(
select(func.avg( select(func.avg(
func.extract('epoch', CrossChainBridgeRequest.completed_at) - func.extract('epoch', BridgeRequest.completed_at) -
func.extract('epoch', CrossChainBridgeRequest.created_at) func.extract('epoch', BridgeRequest.created_at)
)).where( )).where(
CrossChainBridgeRequest.created_at >= cutoff_time, BridgeRequest.created_at >= cutoff_time,
CrossChainBridgeRequest.status == BridgeRequestStatus.COMPLETED BridgeRequest.status == BridgeRequestStatus.COMPLETED
) )
).scalar() or 0 ).scalar() or 0
@@ -354,9 +354,9 @@ class CrossChainBridgeService:
chain_distribution = {} chain_distribution = {}
for chain_id in self.wallet_adapters.keys(): for chain_id in self.wallet_adapters.keys():
chain_requests = self.session.exec( chain_requests = self.session.exec(
select(func.count(CrossChainBridgeRequest.id)).where( select(func.count(BridgeRequest.id)).where(
CrossChainBridgeRequest.created_at >= cutoff_time, BridgeRequest.created_at >= cutoff_time,
CrossChainBridgeRequest.source_chain_id == chain_id BridgeRequest.source_chain_id == chain_id
) )
).scalar() or 0 ).scalar() or 0
@@ -410,8 +410,8 @@ class CrossChainBridgeService:
"""Process a bridge request""" """Process a bridge request"""
try: try:
stmt = select(CrossChainBridgeRequest).where( stmt = select(BridgeRequest).where(
CrossChainBridgeRequest.id == bridge_request_id BridgeRequest.id == bridge_request_id
) )
bridge_request = self.session.exec(stmt).first() bridge_request = self.session.exec(stmt).first()
@@ -438,8 +438,8 @@ class CrossChainBridgeService:
logger.error(f"Error processing bridge request {bridge_request_id}: {e}") logger.error(f"Error processing bridge request {bridge_request_id}: {e}")
# Update status to failed # Update status to failed
try: try:
stmt = update(CrossChainBridgeRequest).where( stmt = update(BridgeRequest).where(
CrossChainBridgeRequest.id == bridge_request_id BridgeRequest.id == bridge_request_id
).values( ).values(
status=BridgeRequestStatus.FAILED, status=BridgeRequestStatus.FAILED,
error_message=str(e), error_message=str(e),
@@ -450,7 +450,7 @@ class CrossChainBridgeService:
except: except:
pass pass
async def _execute_atomic_swap(self, bridge_request: CrossChainBridgeRequest) -> None: async def _execute_atomic_swap(self, bridge_request: BridgeRequest) -> None:
"""Execute atomic swap protocol""" """Execute atomic swap protocol"""
try: try:
@@ -510,7 +510,7 @@ class CrossChainBridgeService:
logger.error(f"Error executing atomic swap: {e}") logger.error(f"Error executing atomic swap: {e}")
raise raise
async def _execute_liquidity_pool_swap(self, bridge_request: CrossChainBridgeRequest) -> None: async def _execute_liquidity_pool_swap(self, bridge_request: BridgeRequest) -> None:
"""Execute liquidity pool swap""" """Execute liquidity pool swap"""
try: try:
@@ -549,7 +549,7 @@ class CrossChainBridgeService:
logger.error(f"Error executing liquidity pool swap: {e}") logger.error(f"Error executing liquidity pool swap: {e}")
raise raise
async def _execute_htlc_swap(self, bridge_request: CrossChainBridgeRequest) -> None: async def _execute_htlc_swap(self, bridge_request: BridgeRequest) -> None:
"""Execute HTLC (Hashed Timelock Contract) swap""" """Execute HTLC (Hashed Timelock Contract) swap"""
try: try:
@@ -604,7 +604,7 @@ class CrossChainBridgeService:
logger.error(f"Error executing HTLC swap: {e}") logger.error(f"Error executing HTLC swap: {e}")
raise raise
async def _create_atomic_swap_contract(self, bridge_request: CrossChainBridgeRequest, direction: str) -> Dict[str, Any]: async def _create_atomic_swap_contract(self, bridge_request: BridgeRequest, direction: str) -> Dict[str, Any]:
"""Create atomic swap contract data""" """Create atomic swap contract data"""
# Mock implementation # Mock implementation
contract_address = f"0x{hashlib.sha256(f'atomic_swap_{bridge_request.id}_{direction}'.encode()).hexdigest()[:40]}" contract_address = f"0x{hashlib.sha256(f'atomic_swap_{bridge_request.id}_{direction}'.encode()).hexdigest()[:40]}"
@@ -615,7 +615,7 @@ class CrossChainBridgeService:
"contract_data": contract_data "contract_data": contract_data
} }
async def _create_liquidity_pool_swap_data(self, bridge_request: CrossChainBridgeRequest, pool: Dict[str, Any]) -> Dict[str, Any]: async def _create_liquidity_pool_swap_data(self, bridge_request: BridgeRequest, pool: Dict[str, Any]) -> Dict[str, Any]:
"""Create liquidity pool swap data""" """Create liquidity pool swap data"""
# Mock implementation # Mock implementation
pool_address = pool.get("address", f"0x{hashlib.sha256(f'pool_{bridge_request.source_chain_id}_{bridge_request.target_chain_id}'.encode()).hexdigest()[:40]}") pool_address = pool.get("address", f"0x{hashlib.sha256(f'pool_{bridge_request.source_chain_id}_{bridge_request.target_chain_id}'.encode()).hexdigest()[:40]}")
@@ -626,7 +626,7 @@ class CrossChainBridgeService:
"swap_data": swap_data "swap_data": swap_data
} }
async def _create_htlc_contract(self, bridge_request: CrossChainBridgeRequest, secret_hash: str, direction: str) -> Dict[str, Any]: async def _create_htlc_contract(self, bridge_request: BridgeRequest, secret_hash: str, direction: str) -> Dict[str, Any]:
"""Create HTLC contract data""" """Create HTLC contract data"""
contract_address = f"0x{hashlib.sha256(f'htlc_{bridge_request.id}_{direction}_{secret_hash}'.encode()).hexdigest()[:40]}" contract_address = f"0x{hashlib.sha256(f'htlc_{bridge_request.id}_{direction}_{secret_hash}'.encode()).hexdigest()[:40]}"
contract_data = f"0x{hashlib.sha256(f'htlc_data_{bridge_request.id}_{secret_hash}'.encode()).hexdigest()}" contract_data = f"0x{hashlib.sha256(f'htlc_data_{bridge_request.id}_{secret_hash}'.encode()).hexdigest()}"
@@ -637,7 +637,7 @@ class CrossChainBridgeService:
"secret_hash": secret_hash "secret_hash": secret_hash
} }
async def _complete_htlc(self, bridge_request: CrossChainBridgeRequest, secret: str) -> None: async def _complete_htlc(self, bridge_request: BridgeRequest, secret: str) -> None:
"""Complete HTLC by revealing secret""" """Complete HTLC by revealing secret"""
# Mock implementation # Mock implementation
bridge_request.target_transaction_hash = f"0x{hashlib.sha256(f'htlc_complete_{bridge_request.id}_{secret}'.encode()).hexdigest()}" bridge_request.target_transaction_hash = f"0x{hashlib.sha256(f'htlc_complete_{bridge_request.id}_{secret}'.encode()).hexdigest()}"
@@ -717,7 +717,7 @@ class CrossChainBridgeService:
logger.error(f"Error waiting for confirmations: {e}") logger.error(f"Error waiting for confirmations: {e}")
raise raise
async def _calculate_bridge_progress(self, bridge_request: CrossChainBridgeRequest) -> float: async def _calculate_bridge_progress(self, bridge_request: BridgeRequest) -> float:
"""Calculate bridge progress percentage""" """Calculate bridge progress percentage"""
try: try:
@@ -749,7 +749,7 @@ class CrossChainBridgeService:
logger.error(f"Error calculating bridge progress: {e}") logger.error(f"Error calculating bridge progress: {e}")
return 0.0 return 0.0
async def _process_refund(self, bridge_request: CrossChainBridgeRequest) -> None: async def _process_refund(self, bridge_request: BridgeRequest) -> None:
"""Process refund for cancelled bridge request""" """Process refund for cancelled bridge request"""
try: try:
# Mock refund implementation # Mock refund implementation

View File

@@ -1,3 +1,6 @@
from ..domain.global_marketplace import GlobalMarketplaceAnalyticsRequest
from ..domain.global_marketplace import GlobalMarketplaceTransactionRequest
from ..domain.global_marketplace import GlobalMarketplaceOfferRequest
""" """
Global Marketplace Services Global Marketplace Services
Core services for global marketplace operations, multi-region support, and cross-chain integration Core services for global marketplace operations, multi-region support, and cross-chain integration
@@ -34,7 +37,7 @@ class GlobalMarketplaceService:
async def create_global_offer( async def create_global_offer(
self, self,
request: GlobalMarketplaceOfferRequest, request: "GlobalMarketplaceOfferRequest",
agent_identity: AgentIdentity agent_identity: AgentIdentity
) -> GlobalMarketplaceOffer: ) -> GlobalMarketplaceOffer:
"""Create a new global marketplace offer""" """Create a new global marketplace offer"""
@@ -142,7 +145,7 @@ class GlobalMarketplaceService:
async def create_global_transaction( async def create_global_transaction(
self, self,
request: GlobalMarketplaceTransactionRequest, request: "GlobalMarketplaceTransactionRequest",
buyer_identity: AgentIdentity buyer_identity: AgentIdentity
) -> GlobalMarketplaceTransaction: ) -> GlobalMarketplaceTransaction:
"""Create a global marketplace transaction""" """Create a global marketplace transaction"""
@@ -258,7 +261,7 @@ class GlobalMarketplaceService:
async def get_marketplace_analytics( async def get_marketplace_analytics(
self, self,
request: GlobalMarketplaceAnalyticsRequest request: "GlobalMarketplaceAnalyticsRequest"
) -> GlobalMarketplaceAnalytics: ) -> GlobalMarketplaceAnalytics:
"""Get global marketplace analytics""" """Get global marketplace analytics"""
@@ -291,7 +294,7 @@ class GlobalMarketplaceService:
async def _generate_analytics( async def _generate_analytics(
self, self,
request: GlobalMarketplaceAnalyticsRequest request: "GlobalMarketplaceAnalyticsRequest"
) -> GlobalMarketplaceAnalytics: ) -> GlobalMarketplaceAnalytics:
"""Generate analytics for the specified period""" """Generate analytics for the specified period"""

View File

@@ -18,7 +18,7 @@ from aitbc.logging import get_logger
from sqlmodel import Session, select, update, delete, func, Field from sqlmodel import Session, select, update, delete, func, Field
from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.exc import SQLAlchemyError
from ..domain.cross_chain_bridge import CrossChainBridgeRequest, BridgeRequestStatus from ..domain.cross_chain_bridge import BridgeRequest, BridgeRequestStatus
from ..domain.agent_identity import AgentWallet from ..domain.agent_identity import AgentWallet
from ..agent_identity.wallet_adapter_enhanced import ( from ..agent_identity.wallet_adapter_enhanced import (
EnhancedWalletAdapter, WalletAdapterFactory, SecurityLevel, EnhancedWalletAdapter, WalletAdapterFactory, SecurityLevel,

View File

@@ -86,14 +86,16 @@ def session_scope() -> Generator[Session, None, None]:
# Dependency for FastAPI # Dependency for FastAPI
SessionDep = Session from fastapi import Depends
from typing import Annotated
def get_session() -> Session: def get_session() -> Session:
"""Get a database session.""" """Get a database session."""
engine = get_engine() engine = get_engine()
return Session(engine) return Session(engine)
SessionDep = Annotated[Session, Depends(get_session)]
# Async support for future use # Async support for future use
async def get_async_engine() -> AsyncEngine: async def get_async_engine() -> AsyncEngine:

View File

@@ -173,17 +173,17 @@ def cached(ttl_seconds: int = 300, key_prefix: str = ""):
# Cache configurations for different query types # Cache configurations for different query types
CACHE_CONFIGS = { CACHE_CONFIGS = {
"marketplace_stats": {"ttl": 300, "prefix": "marketplace_"}, # 5 minutes "marketplace_stats": {"ttl_seconds": 300, "key_prefix": "marketplace_"}, # 5 minutes
"job_list": {"ttl": 60, "prefix": "jobs_"}, # 1 minute "job_list": {"ttl_seconds": 60, "key_prefix": "jobs_"}, # 1 minute
"miner_list": {"ttl": 120, "prefix": "miners_"}, # 2 minutes "miner_list": {"ttl_seconds": 120, "key_prefix": "miners_"}, # 2 minutes
"user_balance": {"ttl": 30, "prefix": "balance_"}, # 30 seconds "user_balance": {"ttl_seconds": 30, "key_prefix": "balance_"}, # 30 seconds
"exchange_rates": {"ttl": 600, "prefix": "rates_"}, # 10 minutes "exchange_rates": {"ttl_seconds": 600, "key_prefix": "rates_"}, # 10 minutes
} }
def get_cache_config(cache_type: str) -> Dict[str, Any]: def get_cache_config(cache_type: str) -> Dict[str, Any]:
"""Get cache configuration for a specific type""" """Get cache configuration for a specific type"""
return CACHE_CONFIGS.get(cache_type, {"ttl": 300, "prefix": ""}) return CACHE_CONFIGS.get(cache_type, {"ttl_seconds": 300, "key_prefix": ""})
# Periodic cleanup task # Periodic cleanup task

152
coordinator_working.py Executable file
View File

@@ -0,0 +1,152 @@
#!/usr/bin/env python3
"""
Simple working coordinator API for GPU miner
"""
import logging
from fastapi import FastAPI, HTTPException, Header
from fastapi.middleware.cors import CORSMiddleware
from typing import Optional, Dict, Any
from pydantic import BaseModel
import time
# Setup logging: root logger at INFO so miner registration/heartbeat events show up.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Create FastAPI app — a deliberately minimal, self-contained coordinator
# used to exercise the GPU miner without the full service stack.
app = FastAPI(
    title="AITBC Coordinator API - Working",
    version="0.1.0",
    description="Simple working coordinator service for GPU miner",
)

# Add CORS middleware. NOTE(review): allow_origins=["*"] together with
# allow_credentials=True is wide open — acceptable for local testing only.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"],
    allow_headers=["*"]
)

# Simple in-memory storage (process-local; lost on restart).
# miners: miner_id -> registration/heartbeat record
# jobs:   job_id   -> job record (currently unused by the endpoints below)
miners: Dict[str, Dict[str, Any]] = {}
jobs: Dict[str, Dict[str, Any]] = {}
# Pydantic models
class MinerRegister(BaseModel):
    """Registration payload sent by a miner.

    NOTE(review): the register endpoint currently accepts a raw dict rather
    than this model — kept as documentation of the intended schema.
    """
    miner_id: str
    capabilities: list[str] = []  # e.g. ["gpu"]
    region: str = "default"
    concurrency: int = 1  # max parallel jobs the miner will take
class MinerHeartbeat(BaseModel):
    """Periodic liveness report from a registered miner."""
    miner_id: str
    status: str = "online"
    inflight: int = 0  # jobs currently being processed
class JobSubmit(BaseModel):
    """Inference job submission (not yet consumed by any endpoint here)."""
    prompt: str
    model: str = "gemma3:1b"
    priority: str = "normal"  # presumably "low"/"normal"/"high" — TODO confirm
# Basic auth (simple for testing)
API_KEY = "miner_test"

def verify_api_key(api_key: Optional[str] = Header(None), x_api_key: Optional[str] = Header(None)):
    """Check the API key from either the ``api-key`` or ``x-api-key`` header.

    Returns the accepted key, or raises HTTP 401 when neither header
    carries the expected value.
    """
    supplied = api_key or x_api_key
    if supplied == API_KEY:
        return supplied
    raise HTTPException(status_code=401, detail="invalid api key")
@app.get("/health", tags=["health"], summary="Service healthcheck")
async def health() -> dict[str, str]:
    """Liveness probe: report that the coordinator API is responding."""
    payload = {"status": "ok", "service": "coordinator-api"}
    return payload
@app.get("/v1/health", tags=["health"], summary="Service healthcheck")
async def health_v1() -> dict[str, str]:
    """Versioned alias of the liveness probe under the /v1 prefix."""
    payload = {"status": "ok", "service": "coordinator-api"}
    return payload
@app.post("/v1/miners/register", tags=["miner"], summary="Register or update miner")
async def register_miner(
    request: dict,
    api_key: Optional[str] = Header(None),
    x_api_key: Optional[str] = Header(None),
    miner_id: Optional[str] = None
) -> dict[str, str]:
    """Register (or re-register) a miner and return a session token.

    The miner id is taken from the ``miner_id`` query parameter if present,
    otherwise from the request body, falling back to ``"miner_test"``.
    Re-registration overwrites the existing record.

    Raises:
        HTTPException: 401 when the API key is missing or wrong.
    """
    # Reuse the shared auth helper instead of duplicating the key check.
    verify_api_key(api_key, x_api_key)
    # Get miner_id from query parameter or request body
    mid = miner_id or request.get("miner_id", "miner_test")
    # Honor client-reported capabilities/region/concurrency; the previous
    # hard-coded ["gpu"] is kept as the default for older clients.
    miners[mid] = {
        "id": mid,
        "capabilities": request.get("capabilities", ["gpu"]),
        "region": request.get("region", "localhost"),
        "concurrency": request.get("concurrency", 1),
        "status": "online",
        "inflight": 0,
        "last_heartbeat": time.time(),
        "session_token": f"token_{mid}_{int(time.time())}"
    }
    logger.info(f"Miner {mid} registered")
    return {"status": "ok", "session_token": miners[mid]["session_token"]}
@app.post("/v1/miners/heartbeat", tags=["miner"], summary="Send miner heartbeat")
async def miner_heartbeat(
    request: dict,
    api_key: Optional[str] = Header(None),
    x_api_key: Optional[str] = Header(None),
    miner_id: Optional[str] = None
) -> dict[str, str]:
    """Record a heartbeat for a previously registered miner.

    Raises:
        HTTPException: 401 on a bad API key, 404 if the miner never registered.
    """
    # Reuse the shared auth helper instead of duplicating the key check.
    verify_api_key(api_key, x_api_key)
    # Get miner_id from query parameter or request body
    mid = miner_id or request.get("miner_id", "miner_test")
    if mid not in miners:
        raise HTTPException(status_code=404, detail="miner not registered")
    # The MinerHeartbeat model calls this field "inflight", but the previous
    # implementation read "current_jobs" — accept either, preferring the
    # documented field name.
    inflight = request.get("inflight", request.get("current_jobs", 0))
    miners[mid].update({
        "status": request.get("status", "online"),
        "inflight": inflight,
        "last_heartbeat": time.time()
    })
    return {"status": "ok"}
@app.post("/v1/miners/poll", tags=["miner"], summary="Poll for next job")
async def poll_for_job(
    request: dict,
    api_key: Optional[str] = Header(None),
    x_api_key: Optional[str] = Header(None),
    miner_id: Optional[str] = None
) -> Dict[str, Any]:
    """Hand the miner its next job.

    Job dispatch is not implemented in this minimal coordinator, so the
    response is always ``{"status": "no_jobs"}``.

    Raises:
        HTTPException: 401 when the API key is missing or wrong.
    """
    # Reuse the shared auth helper instead of duplicating the key check.
    verify_api_key(api_key, x_api_key)
    return {"status": "no_jobs"}
@app.get("/", tags=["root"], summary="Root endpoint")
async def root() -> dict[str, str]:
    """Identify the service at the API root."""
    info = {"service": "AITBC Coordinator API", "status": "running"}
    return info
# Standalone entry point: serve the app locally with uvicorn.
if __name__ == "__main__":
    import uvicorn
    logger.info("Starting working coordinator API on port 9080")
    # Bound to loopback only — this test coordinator is not meant to be exposed.
    uvicorn.run(app, host="127.0.0.1", port=9080)

View File

@@ -0,0 +1,56 @@
#!/bin/bash
echo "🚀 COMPREHENSIVE BASELINE TEST (Pre-Deployment)"
echo "==============================================="
sites=(
"localhost|http://127.0.0.1:8000|http://127.0.0.1:8082"
"aitbc (Primary)|http://10.1.223.93:8000|http://10.1.223.93:8082"
"aitbc1 (Secondary)|http://10.1.223.40:8000|http://10.1.223.40:8082"
)
for site in "${sites[@]}"; do
IFS='|' read -r name api_url rpc_url <<< "$site"
echo ""
echo "🔍 Testing Site: $name"
echo "-----------------------------------------------"
# 1. API Live Health
api_live=$(curl -s --connect-timeout 2 "$api_url/health/live" || echo "FAILED")
if [[ "$api_live" == *"FAILED"* ]] || [[ -z "$api_live" ]]; then
echo "❌ API Live: DOWN"
else
echo "✅ API Live: UP ($api_live)"
fi
# 2. Blockchain RPC (Testnet Head)
rpc_head=$(curl -s --connect-timeout 2 "$rpc_url/rpc/head?chain_id=ait-testnet" || echo "FAILED")
if [[ "$rpc_head" == *"FAILED"* ]] || [[ -z "$rpc_head" ]]; then
echo "❌ Blockchain RPC: DOWN"
else
height=$(echo $rpc_head | jq -r '.height // "error"')
echo "✅ Blockchain RPC: UP (Height: $height)"
fi
# 3. ZK ML Circuits (Phase 5 check)
zk_circuits=$(curl -s --connect-timeout 2 "$api_url/ml-zk/circuits" || echo "FAILED")
if [[ "$zk_circuits" == *"FAILED"* ]] || [[ -z "$zk_circuits" ]] || [[ "$zk_circuits" == *"Not Found"* ]]; then
echo "⚠️ ZK Circuits: Unavailable or Not Found"
else
circuit_count=$(echo "$zk_circuits" | jq '.circuits | length' 2>/dev/null || echo "0")
echo "✅ ZK Circuits: Available ($circuit_count circuits)"
fi
# 4. Marketplace GPU List
gpu_list=$(curl -s --connect-timeout 2 "$api_url/marketplace/offers" || echo "FAILED")
if [[ "$gpu_list" == *"FAILED"* ]] || [[ -z "$gpu_list" ]]; then
echo "⚠️ Marketplace Offers: Unavailable"
else
offer_count=$(echo "$gpu_list" | jq 'length' 2>/dev/null || echo "0")
echo "✅ Marketplace Offers: Available ($offer_count offers)"
fi
done
echo ""
echo "==============================================="
echo "🏁 BASELINE TEST COMPLETE"