chore: initialize monorepo with project scaffolding, configs, and CI setup
25
apps/blockchain-node/README.md
Normal file
@@ -0,0 +1,25 @@
# Blockchain Node

## Purpose & Scope

Minimal asset-backed blockchain node that validates compute receipts and mints AIT tokens as described in `docs/bootstrap/blockchain_node.md`.

## Status

Scaffolded. Implementation pending per staged roadmap.

## Devnet Tooling

- `scripts/make_genesis.py` — Generate a deterministic devnet genesis file (`data/devnet/genesis.json`).
- `scripts/keygen.py` — Produce throwaway devnet keypairs (printed or written to disk).
- `scripts/devnet_up.sh` — Launch the blockchain node, the RPC API, and a mock coordinator against a freshly generated genesis file.

### Quickstart

```bash
cd apps/blockchain-node
python scripts/make_genesis.py --force
bash scripts/devnet_up.sh
```

The script sets `PYTHONPATH=src` and starts the proposer loop, the FastAPI app (via `uvicorn`), and the mock coordinator. Press `Ctrl+C` to stop the devnet.
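Once the devnet is up, the RPC API can be smoke-tested directly. A minimal sketch using `httpx` (already a project dependency); the bind address and `/rpc` prefix come from `config.py` and `app.py`, and the account address is a made-up placeholder:

```python
import httpx

BASE = "http://127.0.0.1:8080/rpc"

# Current chain head, served by aitbc_chain/rpc/router.py
print(httpx.get(f"{BASE}/head").json())

# Seed a placeholder devnet address via the faucet, then read its balance back
addr = "ait1example0000000000000000000000000000000000"
httpx.post(f"{BASE}/admin/mintFaucet", json={"address": addr, "amount": 1000})
print(httpx.get(f"{BASE}/getBalance/{addr}").json())
```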
147
apps/blockchain-node/alembic.ini
Normal file
@@ -0,0 +1,147 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts.
# this is typically a path given in POSIX (e.g. forward slashes)
# format, relative to the token %(here)s which refers to the location of this
# ini file
script_location = %(here)s/migrations

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory. for multiple paths, the path separator
# is defined by "path_separator" below.
prepend_sys_path = .


# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to <script_location>/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "path_separator"
# below.
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions

# path_separator; This indicates what character is used to split lists of file
# paths, including version_locations and prepend_sys_path within configparser
# files such as alembic.ini.
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
# to provide os-dependent path splitting.
#
# Note that in order to support legacy alembic.ini files, this default does NOT
# take place if path_separator is not present in alembic.ini. If this
# option is omitted entirely, fallback logic is as follows:
#
# 1. Parsing of the version_locations option falls back to using the legacy
#    "version_path_separator" key, which if absent then falls back to the legacy
#    behavior of splitting on spaces and/or commas.
# 2. Parsing of the prepend_sys_path option falls back to the legacy
#    behavior of splitting on spaces, commas, or colons.
#
# Valid values for path_separator are:
#
# path_separator = :
# path_separator = ;
# path_separator = space
# path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# database URL. This is consumed by the user-maintained env.py script only.
# other means of configuring database URLs may be customized within the env.py
# file.
sqlalchemy.url = driver://user:pass@localhost/dbname


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
# hooks = ruff
# ruff.type = module
# ruff.module = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME

# Alternatively, use the exec runner to execute a binary found on your PATH
# hooks = ruff
# ruff.type = exec
# ruff.executable = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME

# Logging configuration. This is also consumed by the user-maintained
# env.py script only.
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARNING
handlers = console
qualname =

[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
BIN
apps/blockchain-node/data/chain.db
Normal file
Binary file not shown.
1
apps/blockchain-node/migrations/README
Normal file
@@ -0,0 +1 @@
Generic single-database configuration.
85
apps/blockchain-node/migrations/env.py
Normal file
@@ -0,0 +1,85 @@
from __future__ import annotations

from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool
from sqlmodel import SQLModel

from alembic import context

from aitbc_chain.config import settings
from aitbc_chain import models  # noqa: F401

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Ensure the database path exists and propagate URL to Alembic config
settings.db_path.parent.mkdir(parents=True, exist_ok=True)
config.set_main_option("sqlalchemy.url", f"sqlite:///{settings.db_path}")

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Use SQLModel metadata for autogeneration.
target_metadata = SQLModel.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
28
apps/blockchain-node/migrations/script.py.mako
Normal file
@@ -0,0 +1,28 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    """Upgrade schema."""
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    """Downgrade schema."""
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,34 @@
"""add block relationships

Revision ID: 80bc0020bde2
Revises: e31f486f1484
Create Date: 2025-09-27 06:02:11.656859

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '80bc0020bde2'
down_revision: Union[str, Sequence[str], None] = 'e31f486f1484'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_foreign_key(None, 'receipt', 'block', ['block_height'], ['height'])
    op.create_foreign_key(None, 'transaction', 'block', ['block_height'], ['height'])
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(None, 'transaction', type_='foreignkey')
    op.drop_constraint(None, 'receipt', type_='foreignkey')
    # ### end Alembic commands ###
@@ -0,0 +1,103 @@
"""baseline

Revision ID: e31f486f1484
Revises:
Create Date: 2025-09-27 05:58:27.490151

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = "e31f486f1484"
down_revision: Union[str, Sequence[str], None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""

    op.create_table(
        "block",
        sa.Column("id", sa.Integer(), primary_key=True, nullable=False),
        sa.Column("height", sa.Integer(), nullable=False),
        sa.Column("hash", sa.String(), nullable=False),
        sa.Column("parent_hash", sa.String(), nullable=False),
        sa.Column("proposer", sa.String(), nullable=False),
        sa.Column("timestamp", sa.DateTime(), nullable=False),
        sa.Column("tx_count", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("state_root", sa.String(), nullable=True),
    )
    op.create_index("ix_block_height", "block", ["height"], unique=True)
    op.create_index("ix_block_hash", "block", ["hash"], unique=True)
    op.create_index("ix_block_timestamp", "block", ["timestamp"], unique=False)

    op.create_table(
        "transaction",
        sa.Column("id", sa.Integer(), primary_key=True, nullable=False),
        sa.Column("tx_hash", sa.String(), nullable=False),
        sa.Column("block_height", sa.Integer(), nullable=True),
        sa.Column("sender", sa.String(), nullable=False),
        sa.Column("recipient", sa.String(), nullable=False),
        sa.Column("payload", sa.JSON(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
    )
    op.create_index("ix_transaction_tx_hash", "transaction", ["tx_hash"], unique=True)
    op.create_index(
        "ix_transaction_block_height", "transaction", ["block_height"], unique=False
    )
    op.create_index(
        "ix_transaction_created_at", "transaction", ["created_at"], unique=False
    )

    op.create_table(
        "receipt",
        sa.Column("id", sa.Integer(), primary_key=True, nullable=False),
        sa.Column("job_id", sa.String(), nullable=False),
        sa.Column("receipt_id", sa.String(), nullable=False),
        sa.Column("block_height", sa.Integer(), nullable=True),
        sa.Column("payload", sa.JSON(), nullable=False),
        sa.Column("miner_signature", sa.JSON(), nullable=False),
        sa.Column("coordinator_attestations", sa.JSON(), nullable=False),
        sa.Column("minted_amount", sa.Integer(), nullable=True),
        sa.Column("recorded_at", sa.DateTime(), nullable=False),
    )
    op.create_index("ix_receipt_job_id", "receipt", ["job_id"], unique=False)
    op.create_index("ix_receipt_receipt_id", "receipt", ["receipt_id"], unique=True)
    op.create_index("ix_receipt_block_height", "receipt", ["block_height"], unique=False)
    op.create_index("ix_receipt_recorded_at", "receipt", ["recorded_at"], unique=False)

    op.create_table(
        "account",
        sa.Column("address", sa.String(), nullable=False),
        sa.Column("balance", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("nonce", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("address"),
    )


def downgrade() -> None:
    """Downgrade schema."""

    op.drop_table("account")

    op.drop_index("ix_receipt_recorded_at", table_name="receipt")
    op.drop_index("ix_receipt_block_height", table_name="receipt")
    op.drop_index("ix_receipt_receipt_id", table_name="receipt")
    op.drop_index("ix_receipt_job_id", table_name="receipt")
    op.drop_table("receipt")

    op.drop_index("ix_transaction_created_at", table_name="transaction")
    op.drop_index("ix_transaction_block_height", table_name="transaction")
    op.drop_index("ix_transaction_tx_hash", table_name="transaction")
    op.drop_table("transaction")

    op.drop_index("ix_block_timestamp", table_name="block")
    op.drop_index("ix_block_hash", table_name="block")
    op.drop_index("ix_block_height", table_name="block")
    op.drop_table("block")
1673
apps/blockchain-node/poetry.lock
generated
Normal file
File diff suppressed because it is too large
37
apps/blockchain-node/pyproject.toml
Normal file
@@ -0,0 +1,37 @@
[tool.poetry]
name = "aitbc-blockchain-node"
version = "0.1.0"
description = "AITBC blockchain node service"
authors = ["AITBC Team"]
packages = [
    { include = "aitbc_chain", from = "src" }
]

[tool.poetry.dependencies]
python = "^3.11"
fastapi = "^0.111.0"
uvicorn = { extras = ["standard"], version = "^0.30.0" }
sqlmodel = "^0.0.16"
sqlalchemy = "^2.0.30"
alembic = "^1.13.1"
aiosqlite = "^0.20.0"
websockets = "^12.0"
pydantic = "^2.7.0"
pydantic-settings = "^2.2.1"
orjson = "^3.10.0"
python-dotenv = "^1.0.1"
httpx = "^0.27.0"
uvloop = { version = "^0.19.0", optional = true }
rich = "^13.7.1"
cryptography = "^42.0.5"

[tool.poetry.extras]
uvloop = ["uvloop"]

[tool.poetry.group.dev.dependencies]
pytest = "^8.2.0"
pytest-asyncio = "^0.23.0"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
36
apps/blockchain-node/scripts/devnet_up.sh
Normal file
@@ -0,0 +1,36 @@
#!/usr/bin/env bash
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
export PYTHONPATH="${ROOT_DIR}/src:${ROOT_DIR}/scripts:${PYTHONPATH:-}"

GENESIS_PATH="${ROOT_DIR}/data/devnet/genesis.json"
python "${ROOT_DIR}/scripts/make_genesis.py" --output "${GENESIS_PATH}" --force

echo "[devnet] Generated genesis at ${GENESIS_PATH}"

declare -a CHILD_PIDS=()
cleanup() {
    for pid in "${CHILD_PIDS[@]}"; do
        if kill -0 "$pid" 2>/dev/null; then
            kill "$pid" 2>/dev/null || true
        fi
    done
}
trap cleanup EXIT

python -m aitbc_chain.main &
CHILD_PIDS+=($!)
echo "[devnet] Blockchain node started (PID ${CHILD_PIDS[-1]})"

sleep 1

python -m uvicorn aitbc_chain.app:app --host 127.0.0.1 --port 8080 --log-level info &
CHILD_PIDS+=($!)
echo "[devnet] RPC API serving at http://127.0.0.1:8080"

python -m uvicorn mock_coordinator:app --host 127.0.0.1 --port 8090 --log-level info &
CHILD_PIDS+=($!)
echo "[devnet] Mock coordinator serving at http://127.0.0.1:8090"

wait
46
apps/blockchain-node/scripts/keygen.py
Normal file
@@ -0,0 +1,46 @@
#!/usr/bin/env python3
"""Generate a pseudo devnet key pair for blockchain components."""

from __future__ import annotations

import argparse
import json
import secrets
from pathlib import Path


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="Generate a devnet key pair")
    parser.add_argument(
        "--output",
        type=Path,
        help="Optional path to write the keypair JSON (prints to stdout if omitted)",
    )
    return parser.parse_args()


def generate_keypair() -> dict:
    # Pseudo keys only: random hex, not a real signing keypair (the public key
    # is not derived from the private key).
    private_key = secrets.token_hex(32)
    public_key = secrets.token_hex(32)
    address = "ait1" + secrets.token_hex(20)
    return {
        "private_key": private_key,
        "public_key": public_key,
        "address": address,
    }


def main() -> None:
    args = parse_args()
    keypair = generate_keypair()
    payload = json.dumps(keypair, indent=2)
    if args.output:
        args.output.parent.mkdir(parents=True, exist_ok=True)
        args.output.write_text(payload + "\n", encoding="utf-8")
        print(f"[keygen] wrote keypair to {args.output}")
    else:
        print(payload)


if __name__ == "__main__":
    main()
96
apps/blockchain-node/scripts/make_genesis.py
Normal file
@@ -0,0 +1,96 @@
#!/usr/bin/env python3
"""Generate a deterministic devnet genesis file for the blockchain node."""

from __future__ import annotations

import argparse
import json
import time
from pathlib import Path

DEFAULT_GENESIS = {
    "chain_id": "ait-devnet",
    "timestamp": None,  # populated at runtime
    "params": {
        "mint_per_unit": 1000,
        "coordinator_ratio": 0.05,
        "base_fee": 10,
        "fee_per_byte": 1,
    },
    "accounts": [
        {
            "address": "ait1faucet000000000000000000000000000000000",
            "balance": 1_000_000_000,
            "nonce": 0,
        }
    ],
    "authorities": [
        {
            "address": "ait1devproposer000000000000000000000000000000",
            "weight": 1,
        }
    ],
}


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="Generate devnet genesis data")
    parser.add_argument(
        "--output",
        type=Path,
        default=Path("data/devnet/genesis.json"),
        help="Path to write the generated genesis file (default: data/devnet/genesis.json)",
    )
    parser.add_argument(
        "--force",
        action="store_true",
        help="Overwrite the genesis file if it already exists.",
    )
    parser.add_argument(
        "--faucet-address",
        default="ait1faucet000000000000000000000000000000000",
        help="Address seeded with devnet funds.",
    )
    parser.add_argument(
        "--faucet-balance",
        type=int,
        default=1_000_000_000,
        help="Faucet balance in smallest units.",
    )
    parser.add_argument(
        "--authorities",
        nargs="*",
        default=["ait1devproposer000000000000000000000000000000"],
        help="Authority addresses included in the genesis file.",
    )
    return parser.parse_args()


def build_genesis(args: argparse.Namespace) -> dict:
    genesis = json.loads(json.dumps(DEFAULT_GENESIS))  # deep copy via JSON
    genesis["timestamp"] = int(time.time())  # the one field that varies between runs
    genesis["accounts"][0]["address"] = args.faucet_address
    genesis["accounts"][0]["balance"] = args.faucet_balance
    genesis["authorities"] = [
        {"address": address, "weight": 1}
        for address in args.authorities
    ]
    return genesis


def write_genesis(path: Path, data: dict, force: bool) -> None:
    if path.exists() and not force:
        raise SystemExit(f"Genesis file already exists at {path}. Use --force to overwrite.")
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(json.dumps(data, indent=2, sort_keys=True) + "\n", encoding="utf-8")
    print(f"[genesis] wrote genesis file to {path}")


def main() -> None:
    args = parse_args()
    genesis = build_genesis(args)
    write_genesis(args.output, genesis, args.force)


if __name__ == "__main__":
    main()
38
apps/blockchain-node/scripts/mock_coordinator.py
Normal file
@@ -0,0 +1,38 @@
#!/usr/bin/env python3
"""Mock coordinator API for devnet testing."""

from __future__ import annotations

from typing import Any, Dict

from fastapi import FastAPI

app = FastAPI(title="Mock Coordinator API", version="0.1.0")

MOCK_JOBS: Dict[str, Dict[str, Any]] = {
    "job_1": {"status": "complete", "price": "50000", "compute_units": 2500},
    "job_2": {"status": "complete", "price": "25000", "compute_units": 1200},
}


@app.get("/health")
def health() -> Dict[str, str]:
    return {"status": "ok"}


@app.post("/attest/receipt")
def attest_receipt(payload: Dict[str, str]) -> Dict[str, Any]:
    job_id = payload.get("job_id")
    if job_id in MOCK_JOBS:
        return {
            "exists": True,
            "paid": True,
            "not_double_spent": True,
            "quote": MOCK_JOBS[job_id],
        }
    return {
        "exists": False,
        "paid": False,
        "not_double_spent": False,
        "quote": {},
    }
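With the devnet running, the mock coordinator can be exercised the same way; a short sketch assuming the default port 8090 that `devnet_up.sh` uses, querying one of the canned `MOCK_JOBS` entries:

```python
import httpx

resp = httpx.post(
    "http://127.0.0.1:8090/attest/receipt",
    json={"job_id": "job_1"},  # known entry in MOCK_JOBS
)
print(resp.json())  # exists/paid/not_double_spent flags plus the job quote
```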
5
apps/blockchain-node/src/aitbc_chain/__init__.py
Normal file
@@ -0,0 +1,5 @@
"""AITBC blockchain node package."""

from .app import create_app

__all__ = ["create_app"]
33
apps/blockchain-node/src/aitbc_chain/app.py
Normal file
@@ -0,0 +1,33 @@
from __future__ import annotations

from contextlib import asynccontextmanager

from fastapi import APIRouter, FastAPI
from fastapi.responses import PlainTextResponse

from .config import settings
from .database import init_db
from .metrics import metrics_registry
from .rpc.router import router as rpc_router


@asynccontextmanager
async def lifespan(app: FastAPI):
    init_db()
    yield  # hand control back to FastAPI for the application's lifetime


def create_app() -> FastAPI:
    app = FastAPI(title="AITBC Blockchain Node", version="0.1.0", lifespan=lifespan)
    app.include_router(rpc_router, prefix="/rpc", tags=["rpc"])

    metrics_router = APIRouter()

    @metrics_router.get("/metrics", response_class=PlainTextResponse, tags=["metrics"], summary="Prometheus metrics")
    async def metrics() -> str:
        return metrics_registry.render_prometheus()

    app.include_router(metrics_router)

    return app


app = create_app()
30
apps/blockchain-node/src/aitbc_chain/config.py
Normal file
@@ -0,0 +1,30 @@
from __future__ import annotations

from pathlib import Path
from typing import Optional

from pydantic_settings import BaseSettings, SettingsConfigDict


class ChainSettings(BaseSettings):
    model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8", case_sensitive=False)

    chain_id: str = "ait-devnet"
    db_path: Path = Path("./data/chain.db")

    rpc_bind_host: str = "127.0.0.1"
    rpc_bind_port: int = 8080

    p2p_bind_host: str = "0.0.0.0"
    p2p_bind_port: int = 7070

    proposer_id: str = "ait-devnet-proposer"
    proposer_key: Optional[str] = None

    mint_per_unit: int = 1000
    coordinator_ratio: float = 0.05

    block_time_seconds: int = 2


settings = ChainSettings()
5
apps/blockchain-node/src/aitbc_chain/consensus/__init__.py
Normal file
@@ -0,0 +1,5 @@
from __future__ import annotations

from .poa import PoAProposer, ProposerConfig

__all__ = ["PoAProposer", "ProposerConfig"]
140
apps/blockchain-node/src/aitbc_chain/consensus/poa.py
Normal file
@@ -0,0 +1,140 @@
from __future__ import annotations

import asyncio
import hashlib
from dataclasses import dataclass
from datetime import datetime
from typing import Callable, ContextManager, Optional

from sqlmodel import Session, select

from ..logging import get_logger
from ..metrics import metrics_registry
from ..models import Block


@dataclass
class ProposerConfig:
    chain_id: str
    proposer_id: str
    interval_seconds: int


class PoAProposer:
    def __init__(
        self,
        *,
        config: ProposerConfig,
        session_factory: Callable[[], ContextManager[Session]],
    ) -> None:
        self._config = config
        self._session_factory = session_factory
        self._logger = get_logger(__name__)
        self._stop_event = asyncio.Event()
        self._task: Optional[asyncio.Task[None]] = None

    async def start(self) -> None:
        if self._task is not None:
            return
        self._logger.info("Starting PoA proposer loop", extra={"interval": self._config.interval_seconds})
        self._ensure_genesis_block()
        self._stop_event.clear()
        self._task = asyncio.create_task(self._run_loop(), name="poa-proposer-loop")

    async def stop(self) -> None:
        if self._task is None:
            return
        self._logger.info("Stopping PoA proposer loop")
        self._stop_event.set()
        await self._task
        self._task = None

    async def _run_loop(self) -> None:
        while not self._stop_event.is_set():
            await self._wait_until_next_slot()
            if self._stop_event.is_set():
                break
            try:
                self._propose_block()
            except Exception as exc:  # pragma: no cover - defensive logging
                self._logger.exception("Failed to propose block", extra={"error": str(exc)})

    async def _wait_until_next_slot(self) -> None:
        # Sleep until interval_seconds have elapsed since the head block,
        # waking early if a stop is requested.
        head = self._fetch_chain_head()
        if head is None:
            return
        now = datetime.utcnow()
        elapsed = (now - head.timestamp).total_seconds()
        sleep_for = max(self._config.interval_seconds - elapsed, 0)
        if sleep_for <= 0:
            return
        try:
            await asyncio.wait_for(self._stop_event.wait(), timeout=sleep_for)
        except asyncio.TimeoutError:
            return

    def _propose_block(self) -> None:
        with self._session_factory() as session:
            head = session.exec(select(Block).order_by(Block.height.desc()).limit(1)).first()
            next_height = 0
            parent_hash = "0x00"
            if head is not None:
                next_height = head.height + 1
                parent_hash = head.hash

            timestamp = datetime.utcnow()
            block_hash = self._compute_block_hash(next_height, parent_hash, timestamp)

            block = Block(
                height=next_height,
                hash=block_hash,
                parent_hash=parent_hash,
                proposer=self._config.proposer_id,
                timestamp=timestamp,
                tx_count=0,
                state_root=None,
            )
            session.add(block)
            session.commit()

        metrics_registry.increment("blocks_proposed_total")
        metrics_registry.set_gauge("chain_head_height", float(next_height))

        self._logger.info(
            "Proposed block",
            extra={
                "height": next_height,
                "hash": block_hash,
                "parent_hash": parent_hash,
                "timestamp": timestamp.isoformat(),
            },
        )

    def _ensure_genesis_block(self) -> None:
        with self._session_factory() as session:
            head = session.exec(select(Block).order_by(Block.height.desc()).limit(1)).first()
            if head is not None:
                return

            timestamp = datetime.utcnow()
            genesis_hash = self._compute_block_hash(0, "0x00", timestamp)
            genesis = Block(
                height=0,
                hash=genesis_hash,
                parent_hash="0x00",
                proposer=self._config.proposer_id,
                timestamp=timestamp,
                tx_count=0,
                state_root=None,
            )
            session.add(genesis)
            session.commit()
            self._logger.info("Created genesis block", extra={"hash": genesis_hash})

    def _fetch_chain_head(self) -> Optional[Block]:
        with self._session_factory() as session:
            return session.exec(select(Block).order_by(Block.height.desc()).limit(1)).first()

    def _compute_block_hash(self, height: int, parent_hash: str, timestamp: datetime) -> str:
        payload = f"{self._config.chain_id}|{height}|{parent_hash}|{timestamp.isoformat()}".encode()
        return "0x" + hashlib.sha256(payload).hexdigest()
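For orientation, the block hash above is plain SHA-256 over a pipe-delimited string. A standalone sketch, with hypothetical values, that reproduces what `_compute_block_hash` computes:

```python
import hashlib
from datetime import datetime

# Hypothetical inputs for illustration only
chain_id = "ait-devnet"
height = 1
parent_hash = "0x00"
timestamp = datetime(2025, 9, 27, 6, 0, 0)

payload = f"{chain_id}|{height}|{parent_hash}|{timestamp.isoformat()}".encode()
print("0x" + hashlib.sha256(payload).hexdigest())
```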
20
apps/blockchain-node/src/aitbc_chain/database.py
Normal file
@@ -0,0 +1,20 @@
from __future__ import annotations

from contextlib import contextmanager
from typing import Iterator

from sqlmodel import Session, SQLModel, create_engine

from .config import settings

_engine = create_engine(f"sqlite:///{settings.db_path}", echo=False)


def init_db() -> None:
    settings.db_path.parent.mkdir(parents=True, exist_ok=True)
    SQLModel.metadata.create_all(_engine)


@contextmanager
def session_scope() -> Iterator[Session]:
    with Session(_engine) as session:
        yield session
71
apps/blockchain-node/src/aitbc_chain/logging.py
Normal file
@@ -0,0 +1,71 @@
from __future__ import annotations

import logging
from datetime import datetime
from typing import Any, Optional

import json


class JsonFormatter(logging.Formatter):
    RESERVED = {
        "name",
        "msg",
        "args",
        "levelname",
        "levelno",
        "pathname",
        "filename",
        "module",
        "exc_info",
        "exc_text",
        "stack_info",
        "lineno",
        "funcName",
        "created",
        "msecs",
        "relativeCreated",
        "thread",
        "threadName",
        "process",
        "processName",
    }

    def format(self, record: logging.LogRecord) -> str:  # type: ignore[override]
        payload: dict[str, Any] = {
            "timestamp": datetime.utcnow().isoformat() + "Z",
            "level": record.levelname,
            "logger": record.name,
            "message": record.getMessage(),
        }

        for key, value in record.__dict__.items():
            if key in self.RESERVED or key.startswith("_"):
                continue
            payload[key] = value

        if record.exc_info:
            payload["exc_info"] = self.formatException(record.exc_info)
        if record.stack_info:
            payload["stack"] = record.stack_info

        return json.dumps(payload, default=str)


def configure_logging(level: Optional[str] = None) -> None:
    log_level = getattr(logging, (level or "INFO").upper(), logging.INFO)
    root = logging.getLogger()
    if root.handlers:
        return

    handler = logging.StreamHandler()
    formatter = JsonFormatter()
    handler.setFormatter(formatter)
    root.addHandler(handler)
    root.setLevel(log_level)


def get_logger(name: str) -> logging.Logger:
    if not logging.getLogger().handlers:
        configure_logging()
    return logging.getLogger(name)
72
apps/blockchain-node/src/aitbc_chain/main.py
Normal file
@@ -0,0 +1,72 @@
from __future__ import annotations

import asyncio
from contextlib import asynccontextmanager
from typing import AsyncIterator, Optional

from .config import settings
from .consensus import PoAProposer, ProposerConfig
from .database import init_db, session_scope
from .logging import get_logger

logger = get_logger(__name__)


class BlockchainNode:
    def __init__(self) -> None:
        self._stop_event = asyncio.Event()
        self._proposer: Optional[PoAProposer] = None

    async def start(self) -> None:
        logger.info("Starting blockchain node", extra={"chain_id": settings.chain_id})
        init_db()
        self._start_proposer()
        try:
            await self._stop_event.wait()
        finally:
            await self._shutdown()

    async def stop(self) -> None:
        logger.info("Stopping blockchain node")
        self._stop_event.set()
        await self._shutdown()

    def _start_proposer(self) -> None:
        if self._proposer is not None:
            return

        proposer_config = ProposerConfig(
            chain_id=settings.chain_id,
            proposer_id=settings.proposer_id,
            interval_seconds=settings.block_time_seconds,
        )
        self._proposer = PoAProposer(config=proposer_config, session_factory=session_scope)
        asyncio.create_task(self._proposer.start())

    async def _shutdown(self) -> None:
        if self._proposer is None:
            return
        await self._proposer.stop()
        self._proposer = None


@asynccontextmanager
async def node_app() -> AsyncIterator[BlockchainNode]:
    node = BlockchainNode()
    try:
        yield node
    finally:
        await node.stop()


def run() -> None:
    asyncio.run(_run())


async def _run() -> None:
    async with node_app() as node:
        await node.start()


if __name__ == "__main__":  # pragma: no cover
    run()
47
apps/blockchain-node/src/aitbc_chain/mempool.py
Normal file
@@ -0,0 +1,47 @@
from __future__ import annotations

import hashlib
import json
import time
from dataclasses import dataclass
from threading import Lock
from typing import Any, Dict, List

from .metrics import metrics_registry


@dataclass(frozen=True)
class PendingTransaction:
    tx_hash: str
    content: Dict[str, Any]
    received_at: float


class InMemoryMempool:
    def __init__(self) -> None:
        self._lock = Lock()
        self._transactions: Dict[str, PendingTransaction] = {}

    def add(self, tx: Dict[str, Any]) -> str:
        tx_hash = self._compute_hash(tx)
        entry = PendingTransaction(tx_hash=tx_hash, content=tx, received_at=time.time())
        with self._lock:
            self._transactions[tx_hash] = entry
            metrics_registry.set_gauge("mempool_size", float(len(self._transactions)))
        return tx_hash

    def list_transactions(self) -> List[PendingTransaction]:
        with self._lock:
            return list(self._transactions.values())

    def _compute_hash(self, tx: Dict[str, Any]) -> str:
        canonical = json.dumps(tx, sort_keys=True, separators=(",", ":")).encode()
        digest = hashlib.sha256(canonical).hexdigest()
        return f"0x{digest}"


_MEMPOOL = InMemoryMempool()


def get_mempool() -> InMemoryMempool:
    return _MEMPOOL
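A usage sketch of the mempool, mirroring what the `/rpc/sendTx` handler does; the transaction fields follow `TransactionRequest` in `rpc/router.py`, and the sender value is a placeholder:

```python
from aitbc_chain.mempool import get_mempool

mempool = get_mempool()
tx = {"type": "TRANSFER", "sender": "ait1placeholder", "nonce": 0, "fee": 10, "payload": {}, "sig": None}
# Hashing is over the canonical JSON form, so key order does not matter
tx_hash = mempool.add(tx)
print(tx_hash)                           # deterministic 0x-prefixed SHA-256
print(len(mempool.list_transactions()))  # 1
```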
40
apps/blockchain-node/src/aitbc_chain/metrics.py
Normal file
@@ -0,0 +1,40 @@
from __future__ import annotations

from dataclasses import dataclass
from threading import Lock
from typing import Dict


@dataclass
class MetricValue:
    name: str
    value: float


class MetricsRegistry:
    def __init__(self) -> None:
        self._counters: Dict[str, float] = {}
        self._gauges: Dict[str, float] = {}
        self._lock = Lock()

    def increment(self, name: str, amount: float = 1.0) -> None:
        with self._lock:
            self._counters[name] = self._counters.get(name, 0.0) + amount

    def set_gauge(self, name: str, value: float) -> None:
        with self._lock:
            self._gauges[name] = value

    def render_prometheus(self) -> str:
        with self._lock:
            lines: list[str] = []
            for name, value in sorted(self._counters.items()):
                lines.append(f"# TYPE {name} counter")
                lines.append(f"{name} {value}")
            for name, value in sorted(self._gauges.items()):
                lines.append(f"# TYPE {name} gauge")
                lines.append(f"{name} {value}")
            return "\n".join(lines) + "\n"


metrics_registry = MetricsRegistry()
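A quick sketch of the registry in use; the counter and gauge names are the ones the proposer actually emits, and the rendered output shown in comments is illustrative:

```python
from aitbc_chain.metrics import metrics_registry

metrics_registry.increment("blocks_proposed_total")
metrics_registry.set_gauge("chain_head_height", 1.0)
print(metrics_registry.render_prometheus())
# # TYPE blocks_proposed_total counter
# blocks_proposed_total 1.0
# # TYPE chain_head_height gauge
# chain_head_height 1.0
```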
116
apps/blockchain-node/src/aitbc_chain/models.py
Normal file
@@ -0,0 +1,116 @@
from __future__ import annotations

from datetime import datetime
import re
from typing import List, Optional

from pydantic import field_validator
from sqlalchemy import Column
from sqlalchemy.types import JSON
from sqlmodel import Field, Relationship, SQLModel

_HEX_PATTERN = re.compile(r"^(0x)?[0-9a-fA-F]+$")


def _validate_hex(value: str, field_name: str) -> str:
    if not _HEX_PATTERN.fullmatch(value):
        raise ValueError(f"{field_name} must be a hex-encoded string")
    return value.lower()


def _validate_optional_hex(value: Optional[str], field_name: str) -> Optional[str]:
    if value is None:
        return value
    return _validate_hex(value, field_name)


class Block(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    height: int = Field(index=True, unique=True)
    hash: str = Field(index=True, unique=True)
    parent_hash: str
    proposer: str
    timestamp: datetime = Field(default_factory=datetime.utcnow, index=True)
    tx_count: int = 0
    state_root: Optional[str] = None

    transactions: List["Transaction"] = Relationship(back_populates="block")
    receipts: List["Receipt"] = Relationship(back_populates="block")

    @field_validator("hash", mode="before")
    @classmethod
    def _hash_is_hex(cls, value: str) -> str:
        return _validate_hex(value, "Block.hash")

    @field_validator("parent_hash", mode="before")
    @classmethod
    def _parent_hash_is_hex(cls, value: str) -> str:
        return _validate_hex(value, "Block.parent_hash")

    @field_validator("state_root", mode="before")
    @classmethod
    def _state_root_is_hex(cls, value: Optional[str]) -> Optional[str]:
        return _validate_optional_hex(value, "Block.state_root")


class Transaction(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    tx_hash: str = Field(index=True, unique=True)
    block_height: Optional[int] = Field(
        default=None,
        index=True,
        foreign_key="block.height",
    )
    sender: str
    recipient: str
    payload: dict = Field(
        default_factory=dict,
        sa_column=Column(JSON, nullable=False),
    )
    created_at: datetime = Field(default_factory=datetime.utcnow, index=True)

    block: Optional[Block] = Relationship(back_populates="transactions")

    @field_validator("tx_hash", mode="before")
    @classmethod
    def _tx_hash_is_hex(cls, value: str) -> str:
        return _validate_hex(value, "Transaction.tx_hash")


class Receipt(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    job_id: str = Field(index=True)
    receipt_id: str = Field(index=True, unique=True)
    block_height: Optional[int] = Field(
        default=None,
        index=True,
        foreign_key="block.height",
    )
    payload: dict = Field(
        default_factory=dict,
        sa_column=Column(JSON, nullable=False),
    )
    miner_signature: dict = Field(
        default_factory=dict,
        sa_column=Column(JSON, nullable=False),
    )
    coordinator_attestations: list[dict] = Field(
        default_factory=list,
        sa_column=Column(JSON, nullable=False),
    )
    minted_amount: Optional[int] = None
    recorded_at: datetime = Field(default_factory=datetime.utcnow, index=True)

    block: Optional[Block] = Relationship(back_populates="receipts")

    @field_validator("receipt_id", mode="before")
    @classmethod
    def _receipt_id_is_hex(cls, value: str) -> str:
        return _validate_hex(value, "Receipt.receipt_id")


class Account(SQLModel, table=True):
    address: str = Field(primary_key=True)
    balance: int = 0
    nonce: int = 0
    updated_at: datetime = Field(default_factory=datetime.utcnow)
184
apps/blockchain-node/src/aitbc_chain/rpc/router.py
Normal file
@@ -0,0 +1,184 @@
from __future__ import annotations

import json
from typing import Any, Dict, Optional

from fastapi import APIRouter, HTTPException, status
from pydantic import BaseModel, Field, model_validator
from sqlmodel import select

from ..database import session_scope
from ..mempool import get_mempool
from ..metrics import metrics_registry
from ..models import Account, Block, Receipt, Transaction

router = APIRouter()


def _serialize_receipt(receipt: Receipt) -> Dict[str, Any]:
    return {
        "receipt_id": receipt.receipt_id,
        "job_id": receipt.job_id,
        "payload": receipt.payload,
        "miner_signature": receipt.miner_signature,
        "coordinator_attestations": receipt.coordinator_attestations,
        "minted_amount": receipt.minted_amount,
        "recorded_at": receipt.recorded_at.isoformat(),
    }


class TransactionRequest(BaseModel):
    type: str = Field(description="Transaction type, e.g. TRANSFER or RECEIPT_CLAIM")
    sender: str
    nonce: int
    fee: int = Field(ge=0)
    payload: Dict[str, Any]
    sig: Optional[str] = Field(default=None, description="Signature payload")

    @model_validator(mode="after")
    def normalize_type(self) -> "TransactionRequest":  # type: ignore[override]
        normalized = self.type.upper()
        if normalized not in {"TRANSFER", "RECEIPT_CLAIM"}:
            raise ValueError(f"unsupported transaction type: {self.type}")
        self.type = normalized
        return self


class ReceiptSubmissionRequest(BaseModel):
    sender: str
    nonce: int
    fee: int = Field(ge=0)
    payload: Dict[str, Any]
    sig: Optional[str] = None


class EstimateFeeRequest(BaseModel):
    type: Optional[str] = None
    payload: Dict[str, Any] = Field(default_factory=dict)


class MintFaucetRequest(BaseModel):
    address: str
    amount: int = Field(gt=0)


@router.get("/head", summary="Get current chain head")
async def get_head() -> Dict[str, Any]:
    with session_scope() as session:
        result = session.exec(select(Block).order_by(Block.height.desc()).limit(1)).first()
        if result is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="no blocks yet")
        return {
            "height": result.height,
            "hash": result.hash,
            "timestamp": result.timestamp.isoformat(),
            "tx_count": result.tx_count,
        }


@router.get("/blocks/{height}", summary="Get block by height")
async def get_block(height: int) -> Dict[str, Any]:
    with session_scope() as session:
        block = session.exec(select(Block).where(Block.height == height)).first()
        if block is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="block not found")
        return {
            "height": block.height,
            "hash": block.hash,
            "parent_hash": block.parent_hash,
            "timestamp": block.timestamp.isoformat(),
            "tx_count": block.tx_count,
            "state_root": block.state_root,
        }


@router.get("/tx/{tx_hash}", summary="Get transaction by hash")
async def get_transaction(tx_hash: str) -> Dict[str, Any]:
    with session_scope() as session:
        tx = session.exec(select(Transaction).where(Transaction.tx_hash == tx_hash)).first()
        if tx is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="transaction not found")
        return {
            "tx_hash": tx.tx_hash,
            "block_height": tx.block_height,
            "sender": tx.sender,
            "recipient": tx.recipient,
            "payload": tx.payload,
            "created_at": tx.created_at.isoformat(),
        }


@router.get("/receipts/{receipt_id}", summary="Get receipt by ID")
async def get_receipt(receipt_id: str) -> Dict[str, Any]:
    with session_scope() as session:
        receipt = session.exec(select(Receipt).where(Receipt.receipt_id == receipt_id)).first()
        if receipt is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="receipt not found")
        return _serialize_receipt(receipt)


@router.get("/getBalance/{address}", summary="Get account balance")
async def get_balance(address: str) -> Dict[str, Any]:
    with session_scope() as session:
        account = session.get(Account, address)
        if account is None:
            return {"address": address, "balance": 0, "nonce": 0}
        return {
            "address": account.address,
            "balance": account.balance,
            "nonce": account.nonce,
            "updated_at": account.updated_at.isoformat(),
        }


@router.post("/sendTx", summary="Submit a new transaction")
async def send_transaction(request: TransactionRequest) -> Dict[str, Any]:
    mempool = get_mempool()
    tx_dict = request.model_dump()
    tx_hash = mempool.add(tx_dict)
    metrics_registry.increment("rpc_send_tx_total")
    return {"tx_hash": tx_hash}


@router.post("/submitReceipt", summary="Submit receipt claim transaction")
async def submit_receipt(request: ReceiptSubmissionRequest) -> Dict[str, Any]:
    tx_payload = {
        "type": "RECEIPT_CLAIM",
        "sender": request.sender,
        "nonce": request.nonce,
        "fee": request.fee,
        "payload": request.payload,
        "sig": request.sig,
    }
    tx_request = TransactionRequest.model_validate(tx_payload)
    metrics_registry.increment("rpc_submit_receipt_total")
    return await send_transaction(tx_request)


@router.post("/estimateFee", summary="Estimate transaction fee")
async def estimate_fee(request: EstimateFeeRequest) -> Dict[str, Any]:
    base_fee = 10
    per_byte = 1
    payload_bytes = len(json.dumps(request.payload, sort_keys=True, separators=(",", ":")).encode())
    estimated_fee = base_fee + per_byte * payload_bytes
    tx_type = (request.type or "TRANSFER").upper()
    return {
        "type": tx_type,
        "base_fee": base_fee,
        "payload_bytes": payload_bytes,
        "estimated_fee": estimated_fee,
    }


@router.post("/admin/mintFaucet", summary="Mint devnet funds to an address")
async def mint_faucet(request: MintFaucetRequest) -> Dict[str, Any]:
    with session_scope() as session:
        account = session.get(Account, request.address)
        if account is None:
            account = Account(address=request.address, balance=request.amount)
            session.add(account)
        else:
            account.balance += request.amount
        session.commit()
        updated_balance = account.balance
    return {"address": request.address, "balance": updated_balance}