chore: remove configuration files and enhance blockchain explorer with advanced search, analytics, and export features
- Delete .aitbc.yaml.example CLI configuration template - Delete .lycheeignore link checker exclusion rules - Delete .nvmrc Node.js version specification - Add advanced search panel with filters for address, amount range, transaction type, time range, and validator - Add analytics dashboard with transaction volume, active addresses, and block time metrics - Add Chart.js integration
This commit is contained in:
@@ -95,7 +95,7 @@ async def lifespan(app: FastAPI):
|
||||
broadcast_url=settings.gossip_broadcast_url,
|
||||
)
|
||||
await gossip_broker.set_backend(backend)
|
||||
_app_logger.info("Blockchain node started", extra={"chain_id": settings.chain_id})
|
||||
_app_logger.info("Blockchain node started", extra={"supported_chains": settings.supported_chains})
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
@@ -134,7 +134,7 @@ def create_app() -> FastAPI:
|
||||
async def health() -> dict:
|
||||
return {
|
||||
"status": "ok",
|
||||
"chain_id": settings.chain_id,
|
||||
"supported_chains": [c.strip() for c in settings.supported_chains.split(",") if c.strip()],
|
||||
"proposer_id": settings.proposer_id,
|
||||
}
|
||||
|
||||
|
||||
@@ -6,10 +6,20 @@ from typing import Optional
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
class ProposerConfig(BaseModel):
|
||||
chain_id: str
|
||||
proposer_id: str
|
||||
interval_seconds: int
|
||||
max_block_size_bytes: int
|
||||
max_txs_per_block: int
|
||||
|
||||
class ChainSettings(BaseSettings):
|
||||
model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8", case_sensitive=False)
|
||||
|
||||
chain_id: str = "ait-devnet"
|
||||
supported_chains: str = "ait-devnet" # Comma-separated list of supported chain IDs
|
||||
db_path: Path = Path("./data/chain.db")
|
||||
|
||||
rpc_bind_host: str = "127.0.0.1"
|
||||
|
||||
@@ -4,7 +4,6 @@ import re
|
||||
from datetime import datetime
|
||||
from typing import Callable, ContextManager, Optional
|
||||
|
||||
import httpx
|
||||
from sqlmodel import Session, select
|
||||
|
||||
from ..logger import get_logger
|
||||
@@ -21,6 +20,42 @@ def _sanitize_metric_suffix(value: str) -> str:
|
||||
return sanitized or "unknown"
|
||||
|
||||
|
||||
|
||||
import time
|
||||
|
||||
class CircuitBreaker:
    """Minimal three-state circuit breaker (closed -> open -> half-open).

    After ``threshold`` consecutive recorded failures the breaker trips
    open and rejects requests. Once ``timeout`` seconds have elapsed since
    the most recent failure it moves to half-open and lets probe requests
    through; a recorded success closes it again.

    NOTE(review): not thread-safe — assumes single-threaded use by the
    proposer loop; confirm before sharing across tasks.
    """

    def __init__(self, threshold: int, timeout: int):
        self._threshold = threshold          # failures needed to trip open
        self._timeout = timeout              # seconds an open breaker waits before probing
        self._failures = 0                   # consecutive failure count
        self._last_failure_time = 0.0        # epoch seconds of the latest failure
        self._state = "closed"

    @property
    def state(self) -> str:
        """Current state; lazily promotes "open" to "half-open" once the timeout passes."""
        elapsed = time.time() - self._last_failure_time
        if self._state == "open" and elapsed > self._timeout:
            self._state = "half-open"
        return self._state

    def allow_request(self) -> bool:
        """Return True when a request may proceed (closed and half-open both allow)."""
        return self.state in ("closed", "half-open")

    def record_failure(self) -> None:
        """Count a failure and trip the breaker open once the threshold is reached."""
        self._failures += 1
        self._last_failure_time = time.time()
        if self._failures >= self._threshold:
            self._state = "open"

    def record_success(self) -> None:
        """Reset the failure count and close the breaker after a successful call."""
        self._failures = 0
        self._state = "closed"
|
||||
|
||||
class PoAProposer:
|
||||
"""Proof-of-Authority block proposer.
|
||||
|
||||
@@ -83,26 +118,13 @@ class PoAProposer:
|
||||
return
|
||||
|
||||
def _propose_block(self) -> None:
|
||||
# Check RPC mempool for transactions
|
||||
try:
|
||||
response = httpx.get("http://localhost:8082/metrics")
|
||||
if response.status_code == 200:
|
||||
has_transactions = False
|
||||
for line in response.text.split("\n"):
|
||||
if line.startswith("mempool_size"):
|
||||
size = float(line.split(" ")[1])
|
||||
if size > 0:
|
||||
has_transactions = True
|
||||
break
|
||||
|
||||
if not has_transactions:
|
||||
return
|
||||
except Exception as exc:
|
||||
self._logger.error(f"Error checking RPC mempool: {exc}")
|
||||
# Check internal mempool
|
||||
from ..mempool import get_mempool
|
||||
if get_mempool().size(self._config.chain_id) == 0:
|
||||
return
|
||||
|
||||
with self._session_factory() as session:
|
||||
head = session.exec(select(Block).order_by(Block.height.desc()).limit(1)).first()
|
||||
head = session.exec(select(Block).where(Block.chain_id == self._config.chain_id).order_by(Block.height.desc()).limit(1)).first()
|
||||
next_height = 0
|
||||
parent_hash = "0x00"
|
||||
interval_seconds: Optional[float] = None
|
||||
@@ -115,6 +137,7 @@ class PoAProposer:
|
||||
block_hash = self._compute_block_hash(next_height, parent_hash, timestamp)
|
||||
|
||||
block = Block(
|
||||
chain_id=self._config.chain_id,
|
||||
height=next_height,
|
||||
hash=block_hash,
|
||||
parent_hash=parent_hash,
|
||||
@@ -163,13 +186,15 @@ class PoAProposer:
|
||||
|
||||
def _ensure_genesis_block(self) -> None:
|
||||
with self._session_factory() as session:
|
||||
head = session.exec(select(Block).order_by(Block.height.desc()).limit(1)).first()
|
||||
head = session.exec(select(Block).where(Block.chain_id == self._config.chain_id).order_by(Block.height.desc()).limit(1)).first()
|
||||
if head is not None:
|
||||
return
|
||||
|
||||
timestamp = datetime.utcnow()
|
||||
# Use a deterministic genesis timestamp so all nodes agree on the genesis block hash
|
||||
timestamp = datetime(2025, 1, 1, 0, 0, 0)
|
||||
block_hash = self._compute_block_hash(0, "0x00", timestamp)
|
||||
genesis = Block(
|
||||
chain_id=self._config.chain_id,
|
||||
height=0,
|
||||
hash=block_hash,
|
||||
parent_hash="0x00",
|
||||
|
||||
43
apps/blockchain-node/src/aitbc_chain/logger.py
Normal file
43
apps/blockchain-node/src/aitbc_chain/logger.py
Normal file
@@ -0,0 +1,43 @@
|
||||
import json
import logging
import sys
from datetime import datetime, timezone
from logging.handlers import RotatingFileHandler
|
||||
|
||||
class JsonFormatter(logging.Formatter):
    """Render each log record as a single-line JSON document.

    The document always carries ``timestamp`` (UTC, ISO-8601 with a ``Z``
    suffix), ``level``, ``logger`` and the interpolated ``message``.  Any
    attribute named in :attr:`EXTRA_FIELDS` that was attached to the record
    (via ``logger.info(..., extra={...})``) is copied through verbatim.
    """

    # Known extra attributes that callers attach via `extra={...}`; each is
    # surfaced in the JSON output when present on the record.
    EXTRA_FIELDS = ("chain_id", "supported_chains", "height", "hash", "proposer", "error")

    def format(self, record):
        """Return the JSON string for *record*.

        Uses ``record.created`` (the record's own creation time) rather than
        the wall clock at format time, so the timestamp reflects when the
        event was logged even if formatting is deferred.
        """
        created = datetime.fromtimestamp(record.created, tz=timezone.utc)
        log_record = {
            # Drop the tzinfo so isoformat() matches the original
            # "<naive-iso>Z" shape instead of emitting "+00:00".
            "timestamp": created.replace(tzinfo=None).isoformat() + "Z",
            "level": record.levelname,
            "logger": record.name,
            "message": record.getMessage(),
        }

        # Copy through any recognized extra attributes set on the record.
        for field in self.EXTRA_FIELDS:
            if hasattr(record, field):
                log_record[field] = getattr(record, field)

        return json.dumps(log_record)
|
||||
|
||||
def get_logger(name: str) -> logging.Logger:
    """Return a logger that emits JSON lines to stdout.

    Configuration is applied only on first use: if the named logger already
    has handlers attached, it is returned unchanged, so repeated calls do
    not stack duplicate handlers.
    """
    logger = logging.getLogger(name)

    if logger.handlers:
        # Already configured by a previous call — reuse as-is.
        return logger

    logger.setLevel(logging.INFO)
    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(JsonFormatter())
    logger.addHandler(handler)
    return logger
|
||||
@@ -16,10 +16,10 @@ logger = get_logger(__name__)
|
||||
class BlockchainNode:
|
||||
def __init__(self) -> None:
|
||||
self._stop_event = asyncio.Event()
|
||||
self._proposer: Optional[PoAProposer] = None
|
||||
self._proposers: dict[str, PoAProposer] = {}
|
||||
|
||||
async def start(self) -> None:
|
||||
logger.info("Starting blockchain node", extra={"chain_id": settings.chain_id})
|
||||
logger.info("Starting blockchain node", extra={"supported_chains": getattr(settings, 'supported_chains', settings.chain_id)})
|
||||
init_db()
|
||||
init_mempool(
|
||||
backend=settings.mempool_backend,
|
||||
@@ -27,7 +27,7 @@ class BlockchainNode:
|
||||
max_size=settings.mempool_max_size,
|
||||
min_fee=settings.min_fee,
|
||||
)
|
||||
self._start_proposer()
|
||||
self._start_proposers()
|
||||
try:
|
||||
await self._stop_event.wait()
|
||||
finally:
|
||||
@@ -38,29 +38,29 @@ class BlockchainNode:
|
||||
self._stop_event.set()
|
||||
await self._shutdown()
|
||||
|
||||
def _start_proposer(self) -> None:
|
||||
if self._proposer is not None:
|
||||
return
|
||||
def _start_proposers(self) -> None:
|
||||
chains_str = getattr(settings, 'supported_chains', settings.chain_id)
|
||||
chains = [c.strip() for c in chains_str.split(",") if c.strip()]
|
||||
for chain_id in chains:
|
||||
if chain_id in self._proposers:
|
||||
continue
|
||||
|
||||
proposer_config = ProposerConfig(
|
||||
chain_id=settings.chain_id,
|
||||
proposer_id=settings.proposer_id,
|
||||
interval_seconds=settings.block_time_seconds,
|
||||
max_block_size_bytes=settings.max_block_size_bytes,
|
||||
max_txs_per_block=settings.max_txs_per_block,
|
||||
)
|
||||
cb = CircuitBreaker(
|
||||
threshold=settings.circuit_breaker_threshold,
|
||||
timeout=settings.circuit_breaker_timeout,
|
||||
)
|
||||
self._proposer = PoAProposer(config=proposer_config, session_factory=session_scope, circuit_breaker=cb)
|
||||
asyncio.create_task(self._proposer.start())
|
||||
proposer_config = ProposerConfig(
|
||||
chain_id=chain_id,
|
||||
proposer_id=settings.proposer_id,
|
||||
interval_seconds=settings.block_time_seconds,
|
||||
max_block_size_bytes=settings.max_block_size_bytes,
|
||||
max_txs_per_block=settings.max_txs_per_block,
|
||||
)
|
||||
|
||||
proposer = PoAProposer(config=proposer_config, session_factory=session_scope)
|
||||
self._proposers[chain_id] = proposer
|
||||
asyncio.create_task(proposer.start())
|
||||
|
||||
async def _shutdown(self) -> None:
|
||||
if self._proposer is None:
|
||||
return
|
||||
await self._proposer.stop()
|
||||
self._proposer = None
|
||||
for chain_id, proposer in list(self._proposers.items()):
|
||||
await proposer.stop()
|
||||
self._proposers.clear()
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
|
||||
@@ -38,7 +38,7 @@ class InMemoryMempool:
|
||||
self._max_size = max_size
|
||||
self._min_fee = min_fee
|
||||
|
||||
def add(self, tx: Dict[str, Any]) -> str:
|
||||
def add(self, tx: Dict[str, Any], chain_id: str = "ait-devnet") -> str:
|
||||
fee = tx.get("fee", 0)
|
||||
if fee < self._min_fee:
|
||||
raise ValueError(f"Fee {fee} below minimum {self._min_fee}")
|
||||
@@ -56,14 +56,14 @@ class InMemoryMempool:
|
||||
self._evict_lowest_fee()
|
||||
self._transactions[tx_hash] = entry
|
||||
metrics_registry.set_gauge("mempool_size", float(len(self._transactions)))
|
||||
metrics_registry.increment("mempool_tx_added_total")
|
||||
metrics_registry.increment(f"mempool_tx_added_total_{chain_id}")
|
||||
return tx_hash
|
||||
|
||||
def list_transactions(self) -> List[PendingTransaction]:
|
||||
def list_transactions(self, chain_id: str = "ait-devnet") -> List[PendingTransaction]:
|
||||
with self._lock:
|
||||
return list(self._transactions.values())
|
||||
|
||||
def drain(self, max_count: int, max_bytes: int) -> List[PendingTransaction]:
|
||||
def drain(self, max_count: int, max_bytes: int, chain_id: str = "ait-devnet") -> List[PendingTransaction]:
|
||||
"""Drain transactions for block inclusion, prioritized by fee (highest first)."""
|
||||
with self._lock:
|
||||
sorted_txs = sorted(
|
||||
@@ -84,17 +84,17 @@ class InMemoryMempool:
|
||||
del self._transactions[tx.tx_hash]
|
||||
|
||||
metrics_registry.set_gauge("mempool_size", float(len(self._transactions)))
|
||||
metrics_registry.increment("mempool_tx_drained_total", float(len(result)))
|
||||
metrics_registry.increment(f"mempool_tx_drained_total_{chain_id}", float(len(result)))
|
||||
return result
|
||||
|
||||
def remove(self, tx_hash: str) -> bool:
|
||||
def remove(self, tx_hash: str, chain_id: str = "ait-devnet") -> bool:
|
||||
with self._lock:
|
||||
removed = self._transactions.pop(tx_hash, None) is not None
|
||||
if removed:
|
||||
metrics_registry.set_gauge("mempool_size", float(len(self._transactions)))
|
||||
return removed
|
||||
|
||||
def size(self) -> int:
|
||||
def size(self, chain_id: str = "ait-devnet") -> int:
|
||||
with self._lock:
|
||||
return len(self._transactions)
|
||||
|
||||
@@ -104,7 +104,7 @@ class InMemoryMempool:
|
||||
return
|
||||
lowest = min(self._transactions.values(), key=lambda t: (t.fee, -t.received_at))
|
||||
del self._transactions[lowest.tx_hash]
|
||||
metrics_registry.increment("mempool_evictions_total")
|
||||
metrics_registry.increment(f"mempool_evictions_total_{chain_id}")
|
||||
|
||||
|
||||
class DatabaseMempool:
|
||||
@@ -123,17 +123,19 @@ class DatabaseMempool:
|
||||
with self._lock:
|
||||
self._conn.execute("""
|
||||
CREATE TABLE IF NOT EXISTS mempool (
|
||||
tx_hash TEXT PRIMARY KEY,
|
||||
chain_id TEXT NOT NULL,
|
||||
tx_hash TEXT NOT NULL,
|
||||
content TEXT NOT NULL,
|
||||
fee INTEGER DEFAULT 0,
|
||||
size_bytes INTEGER DEFAULT 0,
|
||||
received_at REAL NOT NULL
|
||||
received_at REAL NOT NULL,
|
||||
PRIMARY KEY (chain_id, tx_hash)
|
||||
)
|
||||
""")
|
||||
self._conn.execute("CREATE INDEX IF NOT EXISTS idx_mempool_fee ON mempool(fee DESC)")
|
||||
self._conn.commit()
|
||||
|
||||
def add(self, tx: Dict[str, Any]) -> str:
|
||||
def add(self, tx: Dict[str, Any], chain_id: str = "ait-devnet") -> str:
|
||||
fee = tx.get("fee", 0)
|
||||
if fee < self._min_fee:
|
||||
raise ValueError(f"Fee {fee} below minimum {self._min_fee}")
|
||||
@@ -144,33 +146,34 @@ class DatabaseMempool:
|
||||
|
||||
with self._lock:
|
||||
# Check duplicate
|
||||
row = self._conn.execute("SELECT 1 FROM mempool WHERE tx_hash = ?", (tx_hash,)).fetchone()
|
||||
row = self._conn.execute("SELECT 1 FROM mempool WHERE chain_id = ? AND tx_hash = ?", (chain_id, tx_hash)).fetchone()
|
||||
if row:
|
||||
return tx_hash
|
||||
|
||||
# Evict if full
|
||||
count = self._conn.execute("SELECT COUNT(*) FROM mempool").fetchone()[0]
|
||||
count = self._conn.execute("SELECT COUNT(*) FROM mempool WHERE chain_id = ?", (chain_id,)).fetchone()[0]
|
||||
if count >= self._max_size:
|
||||
self._conn.execute("""
|
||||
DELETE FROM mempool WHERE tx_hash = (
|
||||
SELECT tx_hash FROM mempool ORDER BY fee ASC, received_at DESC LIMIT 1
|
||||
DELETE FROM mempool WHERE chain_id = ? AND tx_hash = (
|
||||
SELECT tx_hash FROM mempool WHERE chain_id = ? ORDER BY fee ASC, received_at DESC LIMIT 1
|
||||
)
|
||||
""")
|
||||
metrics_registry.increment("mempool_evictions_total")
|
||||
""", (chain_id, chain_id))
|
||||
metrics_registry.increment(f"mempool_evictions_total_{chain_id}")
|
||||
|
||||
self._conn.execute(
|
||||
"INSERT INTO mempool (tx_hash, content, fee, size_bytes, received_at) VALUES (?, ?, ?, ?, ?)",
|
||||
(tx_hash, content, fee, size_bytes, time.time())
|
||||
"INSERT INTO mempool (chain_id, tx_hash, content, fee, size_bytes, received_at) VALUES (?, ?, ?, ?, ?, ?)",
|
||||
(chain_id, tx_hash, content, fee, size_bytes, time.time())
|
||||
)
|
||||
self._conn.commit()
|
||||
metrics_registry.increment("mempool_tx_added_total")
|
||||
self._update_gauge()
|
||||
metrics_registry.increment(f"mempool_tx_added_total_{chain_id}")
|
||||
self._update_gauge(chain_id)
|
||||
return tx_hash
|
||||
|
||||
def list_transactions(self) -> List[PendingTransaction]:
|
||||
def list_transactions(self, chain_id: str = "ait-devnet") -> List[PendingTransaction]:
|
||||
with self._lock:
|
||||
rows = self._conn.execute(
|
||||
"SELECT tx_hash, content, fee, size_bytes, received_at FROM mempool ORDER BY fee DESC, received_at ASC"
|
||||
"SELECT tx_hash, content, fee, size_bytes, received_at FROM mempool WHERE chain_id = ? ORDER BY fee DESC, received_at ASC",
|
||||
(chain_id,)
|
||||
).fetchall()
|
||||
return [
|
||||
PendingTransaction(
|
||||
@@ -179,10 +182,11 @@ class DatabaseMempool:
|
||||
) for r in rows
|
||||
]
|
||||
|
||||
def drain(self, max_count: int, max_bytes: int) -> List[PendingTransaction]:
|
||||
def drain(self, max_count: int, max_bytes: int, chain_id: str = "ait-devnet") -> List[PendingTransaction]:
|
||||
with self._lock:
|
||||
rows = self._conn.execute(
|
||||
"SELECT tx_hash, content, fee, size_bytes, received_at FROM mempool ORDER BY fee DESC, received_at ASC"
|
||||
"SELECT tx_hash, content, fee, size_bytes, received_at FROM mempool WHERE chain_id = ? ORDER BY fee DESC, received_at ASC",
|
||||
(chain_id,)
|
||||
).fetchall()
|
||||
|
||||
result: List[PendingTransaction] = []
|
||||
@@ -203,29 +207,29 @@ class DatabaseMempool:
|
||||
|
||||
if hashes_to_remove:
|
||||
placeholders = ",".join("?" * len(hashes_to_remove))
|
||||
self._conn.execute(f"DELETE FROM mempool WHERE tx_hash IN ({placeholders})", hashes_to_remove)
|
||||
self._conn.execute(f"DELETE FROM mempool WHERE chain_id = ? AND tx_hash IN ({placeholders})", [chain_id] + hashes_to_remove)
|
||||
self._conn.commit()
|
||||
|
||||
metrics_registry.increment("mempool_tx_drained_total", float(len(result)))
|
||||
self._update_gauge()
|
||||
metrics_registry.increment(f"mempool_tx_drained_total_{chain_id}", float(len(result)))
|
||||
self._update_gauge(chain_id)
|
||||
return result
|
||||
|
||||
def remove(self, tx_hash: str) -> bool:
|
||||
def remove(self, tx_hash: str, chain_id: str = "ait-devnet") -> bool:
|
||||
with self._lock:
|
||||
cursor = self._conn.execute("DELETE FROM mempool WHERE tx_hash = ?", (tx_hash,))
|
||||
cursor = self._conn.execute("DELETE FROM mempool WHERE chain_id = ? AND tx_hash = ?", (chain_id, tx_hash))
|
||||
self._conn.commit()
|
||||
removed = cursor.rowcount > 0
|
||||
if removed:
|
||||
self._update_gauge()
|
||||
self._update_gauge(chain_id)
|
||||
return removed
|
||||
|
||||
def size(self) -> int:
|
||||
def size(self, chain_id: str = "ait-devnet") -> int:
|
||||
with self._lock:
|
||||
return self._conn.execute("SELECT COUNT(*) FROM mempool").fetchone()[0]
|
||||
return self._conn.execute("SELECT COUNT(*) FROM mempool WHERE chain_id = ?", (chain_id,)).fetchone()[0]
|
||||
|
||||
def _update_gauge(self) -> None:
|
||||
count = self._conn.execute("SELECT COUNT(*) FROM mempool").fetchone()[0]
|
||||
metrics_registry.set_gauge("mempool_size", float(count))
|
||||
def _update_gauge(self, chain_id: str = "ait-devnet") -> None:
|
||||
count = self._conn.execute("SELECT COUNT(*) FROM mempool WHERE chain_id = ?", (chain_id,)).fetchone()[0]
|
||||
metrics_registry.set_gauge(f"mempool_size_{chain_id}", float(count))
|
||||
|
||||
|
||||
# Singleton
|
||||
|
||||
@@ -6,6 +6,7 @@ from pydantic import field_validator
|
||||
from sqlalchemy import Column
|
||||
from sqlalchemy.types import JSON
|
||||
from sqlmodel import Field, Relationship, SQLModel
|
||||
from sqlalchemy import UniqueConstraint
|
||||
|
||||
_HEX_PATTERN = re.compile(r"^(0x)?[0-9a-fA-F]+$")
|
||||
|
||||
@@ -24,9 +25,11 @@ def _validate_optional_hex(value: Optional[str], field_name: str) -> Optional[st
|
||||
|
||||
class Block(SQLModel, table=True):
|
||||
__tablename__ = "block"
|
||||
__table_args__ = (UniqueConstraint("chain_id", "height", name="uix_block_chain_height"),)
|
||||
|
||||
id: Optional[int] = Field(default=None, primary_key=True)
|
||||
height: int = Field(index=True, unique=True)
|
||||
chain_id: str = Field(index=True)
|
||||
height: int = Field(index=True)
|
||||
hash: str = Field(index=True, unique=True)
|
||||
parent_hash: str
|
||||
proposer: str
|
||||
@@ -37,11 +40,19 @@ class Block(SQLModel, table=True):
|
||||
# Relationships - use sa_relationship_kwargs for lazy loading
|
||||
transactions: List["Transaction"] = Relationship(
|
||||
back_populates="block",
|
||||
sa_relationship_kwargs={"lazy": "selectin"}
|
||||
sa_relationship_kwargs={
|
||||
"lazy": "selectin",
|
||||
"primaryjoin": "and_(Transaction.block_height==Block.height, Transaction.chain_id==Block.chain_id)",
|
||||
"foreign_keys": "[Transaction.block_height, Transaction.chain_id]"
|
||||
}
|
||||
)
|
||||
receipts: List["Receipt"] = Relationship(
|
||||
back_populates="block",
|
||||
sa_relationship_kwargs={"lazy": "selectin"}
|
||||
sa_relationship_kwargs={
|
||||
"lazy": "selectin",
|
||||
"primaryjoin": "and_(Receipt.block_height==Block.height, Receipt.chain_id==Block.chain_id)",
|
||||
"foreign_keys": "[Receipt.block_height, Receipt.chain_id]"
|
||||
}
|
||||
)
|
||||
|
||||
@field_validator("hash", mode="before")
|
||||
@@ -62,13 +73,14 @@ class Block(SQLModel, table=True):
|
||||
|
||||
class Transaction(SQLModel, table=True):
|
||||
__tablename__ = "transaction"
|
||||
__table_args__ = (UniqueConstraint("chain_id", "tx_hash", name="uix_tx_chain_hash"),)
|
||||
|
||||
id: Optional[int] = Field(default=None, primary_key=True)
|
||||
tx_hash: str = Field(index=True, unique=True)
|
||||
chain_id: str = Field(index=True)
|
||||
tx_hash: str = Field(index=True)
|
||||
block_height: Optional[int] = Field(
|
||||
default=None,
|
||||
index=True,
|
||||
foreign_key="block.height",
|
||||
)
|
||||
sender: str
|
||||
recipient: str
|
||||
@@ -79,7 +91,13 @@ class Transaction(SQLModel, table=True):
|
||||
created_at: datetime = Field(default_factory=datetime.utcnow, index=True)
|
||||
|
||||
# Relationship
|
||||
block: Optional["Block"] = Relationship(back_populates="transactions")
|
||||
block: Optional["Block"] = Relationship(
|
||||
back_populates="transactions",
|
||||
sa_relationship_kwargs={
|
||||
"primaryjoin": "and_(Transaction.block_height==Block.height, Transaction.chain_id==Block.chain_id)",
|
||||
"foreign_keys": "[Transaction.block_height, Transaction.chain_id]"
|
||||
}
|
||||
)
|
||||
|
||||
@field_validator("tx_hash", mode="before")
|
||||
@classmethod
|
||||
@@ -89,14 +107,15 @@ class Transaction(SQLModel, table=True):
|
||||
|
||||
class Receipt(SQLModel, table=True):
|
||||
__tablename__ = "receipt"
|
||||
__table_args__ = (UniqueConstraint("chain_id", "receipt_id", name="uix_receipt_chain_id"),)
|
||||
|
||||
id: Optional[int] = Field(default=None, primary_key=True)
|
||||
chain_id: str = Field(index=True)
|
||||
job_id: str = Field(index=True)
|
||||
receipt_id: str = Field(index=True, unique=True)
|
||||
receipt_id: str = Field(index=True)
|
||||
block_height: Optional[int] = Field(
|
||||
default=None,
|
||||
index=True,
|
||||
foreign_key="block.height",
|
||||
)
|
||||
payload: dict = Field(
|
||||
default_factory=dict,
|
||||
@@ -114,7 +133,13 @@ class Receipt(SQLModel, table=True):
|
||||
recorded_at: datetime = Field(default_factory=datetime.utcnow, index=True)
|
||||
|
||||
# Relationship
|
||||
block: Optional["Block"] = Relationship(back_populates="receipts")
|
||||
block: Optional["Block"] = Relationship(
|
||||
back_populates="receipts",
|
||||
sa_relationship_kwargs={
|
||||
"primaryjoin": "and_(Receipt.block_height==Block.height, Receipt.chain_id==Block.chain_id)",
|
||||
"foreign_keys": "[Receipt.block_height, Receipt.chain_id]"
|
||||
}
|
||||
)
|
||||
|
||||
@field_validator("receipt_id", mode="before")
|
||||
@classmethod
|
||||
@@ -125,6 +150,7 @@ class Receipt(SQLModel, table=True):
|
||||
class Account(SQLModel, table=True):
|
||||
__tablename__ = "account"
|
||||
|
||||
chain_id: str = Field(primary_key=True)
|
||||
address: str = Field(primary_key=True)
|
||||
balance: int = 0
|
||||
nonce: int = 0
|
||||
|
||||
@@ -67,11 +67,11 @@ class MintFaucetRequest(BaseModel):
|
||||
|
||||
|
||||
@router.get("/head", summary="Get current chain head")
|
||||
async def get_head() -> Dict[str, Any]:
|
||||
async def get_head(chain_id: str = "ait-devnet") -> Dict[str, Any]:
|
||||
metrics_registry.increment("rpc_get_head_total")
|
||||
start = time.perf_counter()
|
||||
with session_scope() as session:
|
||||
result = session.exec(select(Block).order_by(Block.height.desc()).limit(1)).first()
|
||||
result = session.exec(select(Block).where(Block.chain_id == chain_id).order_by(Block.height.desc()).limit(1)).first()
|
||||
if result is None:
|
||||
metrics_registry.increment("rpc_get_head_not_found_total")
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="no blocks yet")
|
||||
@@ -161,11 +161,11 @@ async def get_blocks_range(start: int, end: int) -> Dict[str, Any]:
|
||||
|
||||
|
||||
@router.get("/tx/{tx_hash}", summary="Get transaction by hash")
|
||||
async def get_transaction(tx_hash: str) -> Dict[str, Any]:
|
||||
async def get_transaction(tx_hash: str, chain_id: str = "ait-devnet") -> Dict[str, Any]:
|
||||
metrics_registry.increment("rpc_get_transaction_total")
|
||||
start = time.perf_counter()
|
||||
with session_scope() as session:
|
||||
tx = session.exec(select(Transaction).where(Transaction.tx_hash == tx_hash)).first()
|
||||
tx = session.exec(select(Transaction).where(Transaction.chain_id == chain_id).where(Transaction.tx_hash == tx_hash)).first()
|
||||
if tx is None:
|
||||
metrics_registry.increment("rpc_get_transaction_not_found_total")
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="transaction not found")
|
||||
@@ -304,7 +304,7 @@ async def get_balance(address: str) -> Dict[str, Any]:
|
||||
metrics_registry.increment("rpc_get_balance_total")
|
||||
start = time.perf_counter()
|
||||
with session_scope() as session:
|
||||
account = session.get(Account, address)
|
||||
account = session.get(Account, (chain_id, address))
|
||||
if account is None:
|
||||
metrics_registry.increment("rpc_get_balance_empty_total")
|
||||
metrics_registry.observe("rpc_get_balance_duration_seconds", time.perf_counter() - start)
|
||||
@@ -332,7 +332,7 @@ async def get_address_details(address: str, limit: int = 20, offset: int = 0) ->
|
||||
|
||||
with session_scope() as session:
|
||||
# Get account info
|
||||
account = session.get(Account, address)
|
||||
account = session.get(Account, (chain_id, address))
|
||||
|
||||
# Get transactions where this address is sender or recipient
|
||||
sent_txs = session.exec(
|
||||
@@ -399,6 +399,7 @@ async def get_addresses(limit: int = 20, offset: int = 0, min_balance: int = 0)
|
||||
# Get addresses with balance >= min_balance
|
||||
addresses = session.exec(
|
||||
select(Account)
|
||||
.where(Account.chain_id == chain_id)
|
||||
.where(Account.balance >= min_balance)
|
||||
.order_by(Account.balance.desc())
|
||||
.offset(offset)
|
||||
@@ -406,7 +407,7 @@ async def get_addresses(limit: int = 20, offset: int = 0, min_balance: int = 0)
|
||||
).all()
|
||||
|
||||
# Get total count
|
||||
total_count = len(session.exec(select(Account).where(Account.balance >= min_balance)).all())
|
||||
total_count = len(session.exec(select(Account).where(Account.chain_id == chain_id).where(Account.balance >= min_balance)).all())
|
||||
|
||||
if not addresses:
|
||||
metrics_registry.increment("rpc_get_addresses_empty_total")
|
||||
@@ -421,8 +422,8 @@ async def get_addresses(limit: int = 20, offset: int = 0, min_balance: int = 0)
|
||||
address_list = []
|
||||
for addr in addresses:
|
||||
# Get transaction counts
|
||||
sent_count = session.exec(select(func.count()).select_from(Transaction).where(Transaction.sender == addr.address)).one()
|
||||
received_count = session.exec(select(func.count()).select_from(Transaction).where(Transaction.recipient == addr.address)).one()
|
||||
sent_count = session.exec(select(func.count()).select_from(Transaction).where(Transaction.chain_id == chain_id).where(Transaction.sender == addr.address)).one()
|
||||
received_count = session.exec(select(func.count()).select_from(Transaction).where(Transaction.chain_id == chain_id).where(Transaction.recipient == addr.address)).one()
|
||||
|
||||
address_list.append({
|
||||
"address": addr.address,
|
||||
@@ -445,13 +446,13 @@ async def get_addresses(limit: int = 20, offset: int = 0, min_balance: int = 0)
|
||||
|
||||
|
||||
@router.post("/sendTx", summary="Submit a new transaction")
|
||||
async def send_transaction(request: TransactionRequest) -> Dict[str, Any]:
|
||||
async def send_transaction(request: TransactionRequest, chain_id: str = "ait-devnet") -> Dict[str, Any]:
|
||||
metrics_registry.increment("rpc_send_tx_total")
|
||||
start = time.perf_counter()
|
||||
mempool = get_mempool()
|
||||
tx_dict = request.model_dump()
|
||||
try:
|
||||
tx_hash = mempool.add(tx_dict)
|
||||
tx_hash = mempool.add(tx_dict, chain_id=chain_id)
|
||||
except ValueError as e:
|
||||
metrics_registry.increment("rpc_send_tx_rejected_total")
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
@@ -484,7 +485,7 @@ async def send_transaction(request: TransactionRequest) -> Dict[str, Any]:
|
||||
|
||||
|
||||
@router.post("/submitReceipt", summary="Submit receipt claim transaction")
|
||||
async def submit_receipt(request: ReceiptSubmissionRequest) -> Dict[str, Any]:
|
||||
async def submit_receipt(request: ReceiptSubmissionRequest, chain_id: str = "ait-devnet") -> Dict[str, Any]:
|
||||
metrics_registry.increment("rpc_submit_receipt_total")
|
||||
start = time.perf_counter()
|
||||
tx_payload = {
|
||||
@@ -497,7 +498,7 @@ async def submit_receipt(request: ReceiptSubmissionRequest) -> Dict[str, Any]:
|
||||
}
|
||||
tx_request = TransactionRequest.model_validate(tx_payload)
|
||||
try:
|
||||
response = await send_transaction(tx_request)
|
||||
response = await send_transaction(tx_request, chain_id)
|
||||
metrics_registry.increment("rpc_submit_receipt_success_total")
|
||||
return response
|
||||
except HTTPException:
|
||||
@@ -530,13 +531,13 @@ async def estimate_fee(request: EstimateFeeRequest) -> Dict[str, Any]:
|
||||
|
||||
|
||||
@router.post("/admin/mintFaucet", summary="Mint devnet funds to an address")
|
||||
async def mint_faucet(request: MintFaucetRequest) -> Dict[str, Any]:
|
||||
async def mint_faucet(request: MintFaucetRequest, chain_id: str = "ait-devnet") -> Dict[str, Any]:
|
||||
metrics_registry.increment("rpc_mint_faucet_total")
|
||||
start = time.perf_counter()
|
||||
with session_scope() as session:
|
||||
account = session.get(Account, request.address)
|
||||
account = session.get(Account, (chain_id, request.address))
|
||||
if account is None:
|
||||
account = Account(address=request.address, balance=request.amount)
|
||||
account = Account(chain_id=chain_id, address=request.address, balance=request.amount)
|
||||
session.add(account)
|
||||
else:
|
||||
account.balance += request.amount
|
||||
@@ -559,7 +560,7 @@ class ImportBlockRequest(BaseModel):
|
||||
|
||||
|
||||
@router.post("/importBlock", summary="Import a block from a remote peer")
|
||||
async def import_block(request: ImportBlockRequest) -> Dict[str, Any]:
|
||||
async def import_block(request: ImportBlockRequest, chain_id: str = "ait-devnet") -> Dict[str, Any]:
|
||||
from ..sync import ChainSync, ProposerSignatureValidator
|
||||
from ..config import settings as cfg
|
||||
|
||||
@@ -570,7 +571,7 @@ async def import_block(request: ImportBlockRequest) -> Dict[str, Any]:
|
||||
validator = ProposerSignatureValidator(trusted_proposers=trusted if trusted else None)
|
||||
sync = ChainSync(
|
||||
session_factory=session_scope,
|
||||
chain_id=cfg.chain_id,
|
||||
chain_id=chain_id,
|
||||
max_reorg_depth=cfg.max_reorg_depth,
|
||||
validator=validator,
|
||||
validate_signatures=cfg.sync_validate_signatures,
|
||||
@@ -598,10 +599,10 @@ async def import_block(request: ImportBlockRequest) -> Dict[str, Any]:
|
||||
|
||||
|
||||
@router.get("/syncStatus", summary="Get chain sync status")
|
||||
async def sync_status() -> Dict[str, Any]:
|
||||
async def sync_status(chain_id: str = "ait-devnet") -> Dict[str, Any]:
|
||||
from ..sync import ChainSync
|
||||
from ..config import settings as cfg
|
||||
|
||||
metrics_registry.increment("rpc_sync_status_total")
|
||||
sync = ChainSync(session_factory=session_scope, chain_id=cfg.chain_id)
|
||||
sync = ChainSync(session_factory=session_scope, chain_id=chain_id)
|
||||
return sync.get_sync_status()
|
||||
|
||||
@@ -140,14 +140,14 @@ class ChainSync:
|
||||
|
||||
# Get our chain head
|
||||
our_head = session.exec(
|
||||
select(Block).order_by(Block.height.desc()).limit(1)
|
||||
select(Block).where(Block.chain_id == self._chain_id).order_by(Block.height.desc()).limit(1)
|
||||
).first()
|
||||
our_height = our_head.height if our_head else -1
|
||||
|
||||
# Case 1: Block extends our chain directly
|
||||
if height == our_height + 1:
|
||||
parent_exists = session.exec(
|
||||
select(Block).where(Block.hash == parent_hash)
|
||||
select(Block).where(Block.chain_id == self._chain_id).where(Block.hash == parent_hash)
|
||||
).first()
|
||||
if parent_exists or (height == 0 and parent_hash == "0x00"):
|
||||
result = self._append_block(session, block_data, transactions)
|
||||
@@ -159,7 +159,7 @@ class ChainSync:
|
||||
if height <= our_height:
|
||||
# Check if it's a fork at a previous height
|
||||
existing_at_height = session.exec(
|
||||
select(Block).where(Block.height == height)
|
||||
select(Block).where(Block.chain_id == self._chain_id).where(Block.height == height)
|
||||
).first()
|
||||
if existing_at_height and existing_at_height.hash != block_hash:
|
||||
# Fork detected — resolve by longest chain rule
|
||||
@@ -191,6 +191,7 @@ class ChainSync:
|
||||
tx_count = len(transactions)
|
||||
|
||||
block = Block(
|
||||
chain_id=self._chain_id,
|
||||
height=block_data["height"],
|
||||
hash=block_data["hash"],
|
||||
parent_hash=block_data["parent_hash"],
|
||||
@@ -205,6 +206,7 @@ class ChainSync:
|
||||
if transactions:
|
||||
for tx_data in transactions:
|
||||
tx = Transaction(
|
||||
chain_id=self._chain_id,
|
||||
tx_hash=tx_data.get("tx_hash", ""),
|
||||
block_height=block_data["height"],
|
||||
sender=tx_data.get("sender", ""),
|
||||
@@ -271,14 +273,14 @@ class ChainSync:
|
||||
|
||||
# Perform reorg: remove blocks from fork_height onwards, then append
|
||||
blocks_to_remove = session.exec(
|
||||
select(Block).where(Block.height >= fork_height).order_by(Block.height.desc())
|
||||
select(Block).where(Block.chain_id == self._chain_id).where(Block.height >= fork_height).order_by(Block.height.desc())
|
||||
).all()
|
||||
|
||||
removed_count = 0
|
||||
for old_block in blocks_to_remove:
|
||||
# Remove transactions in the block
|
||||
old_txs = session.exec(
|
||||
select(Transaction).where(Transaction.block_height == old_block.height)
|
||||
select(Transaction).where(Transaction.chain_id == self._chain_id).where(Transaction.block_height == old_block.height)
|
||||
).all()
|
||||
for tx in old_txs:
|
||||
session.delete(tx)
|
||||
@@ -304,11 +306,11 @@ class ChainSync:
|
||||
"""Get current sync status and metrics."""
|
||||
with self._session_factory() as session:
|
||||
head = session.exec(
|
||||
select(Block).order_by(Block.height.desc()).limit(1)
|
||||
select(Block).where(Block.chain_id == self._chain_id).order_by(Block.height.desc()).limit(1)
|
||||
).first()
|
||||
|
||||
total_blocks = session.exec(select(Block)).all()
|
||||
total_txs = session.exec(select(Transaction)).all()
|
||||
total_blocks = session.exec(select(Block).where(Block.chain_id == self._chain_id)).all()
|
||||
total_txs = session.exec(select(Transaction).where(Transaction.chain_id == self._chain_id)).all()
|
||||
|
||||
return {
|
||||
"chain_id": self._chain_id,
|
||||
|
||||
Reference in New Issue
Block a user