chore: initialize monorepo with project scaffolding, configs, and CI setup
This commit is contained in:
13
.editorconfig
Normal file
13
.editorconfig
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
# Editor configuration for AITBC monorepo
|
||||||
|
root = true
|
||||||
|
|
||||||
|
[*]
|
||||||
|
charset = utf-8
|
||||||
|
end_of_line = lf
|
||||||
|
indent_style = space
|
||||||
|
indent_size = 4
|
||||||
|
insert_final_newline = true
|
||||||
|
trim_trailing_whitespace = true
|
||||||
|
|
||||||
|
[*.{py,js,ts,tsx,json,yaml,yml,md}]
|
||||||
|
indent_size = 2
|
||||||
34
.github/workflows/python-tests.yml
vendored
Normal file
34
.github/workflows/python-tests.yml
vendored
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
name: Python Project Tests
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
pull_request:
|
||||||
|
branches: [main]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
tests:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set up Python 3.11
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.11'
|
||||||
|
|
||||||
|
- name: Install Poetry
|
||||||
|
uses: snok/install-poetry@v1
|
||||||
|
with:
|
||||||
|
version: '1.7.1'
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
poetry install --with dev
|
||||||
|
|
||||||
|
- name: Run Python test suites
|
||||||
|
run: |
|
||||||
|
chmod +x scripts/ci/run_python_tests.sh
|
||||||
|
./scripts/ci/run_python_tests.sh
|
||||||
31
.gitignore
vendored
Normal file
31
.gitignore
vendored
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
# AITBC Monorepo ignore rules
|
||||||
|
|
||||||
|
# Python
|
||||||
|
__pycache__/
|
||||||
|
*.pyc
|
||||||
|
*.pyo
|
||||||
|
*.pyd
|
||||||
|
*.so
|
||||||
|
.venv/
|
||||||
|
venv/
|
||||||
|
.env
|
||||||
|
.env.*
|
||||||
|
|
||||||
|
# Node / JS
|
||||||
|
node_modules/
|
||||||
|
dist/
|
||||||
|
build/
|
||||||
|
.npm/
|
||||||
|
yarn.lock
|
||||||
|
package-lock.json
|
||||||
|
pnpm-lock.yaml
|
||||||
|
|
||||||
|
# Editor
|
||||||
|
.idea/
|
||||||
|
.vscode/
|
||||||
|
*.swp
|
||||||
|
*.swo
|
||||||
|
|
||||||
|
# OS
|
||||||
|
.DS_Store
|
||||||
|
Thumbs.db
|
||||||
8
.windsurf/workflows/docs.md
Normal file
8
.windsurf/workflows/docs.md
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
---
|
||||||
|
description: docs/done.md docs/roadmap.md
|
||||||
|
auto_execution_mode: 3
|
||||||
|
---
|
||||||
|
|
||||||
|
Update docs/done.md and docs/roadmap.md first; afterwards, update all other documents in docs/, excluding docs/bootstrap/.
||||||
10
.windsurf/workflows/ns.md
Normal file
10
.windsurf/workflows/ns.md
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
---
|
||||||
|
description: Identify the most important first step and do it.
|
||||||
|
auto_execution_mode: 3
|
||||||
|
---
|
||||||
|
|
||||||
|
Identify the most important first step and do it.

If no tasks are currently in progress, check docs/roadmap.md and carry out the next recommended step.
||||||
6
.windsurf/workflows/roadmap.md
Normal file
6
.windsurf/workflows/roadmap.md
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
---
|
||||||
|
description: docs/roadmap.md
|
||||||
|
auto_execution_mode: 3
|
||||||
|
---
|
||||||
|
|
||||||
|
Check docs/roadmap.md and carry out the next recommended step.
|
||||||
21
LICENSE
Normal file
21
LICENSE
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2025 AITBC
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
16
README.md
Normal file
16
README.md
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
# AITBC Monorepo
|
||||||
|
|
||||||
|
This repository houses all components of the Artificial Intelligence Token Blockchain (AITBC) stack, including coordinator services, blockchain node, miner daemon, client-facing web apps, SDKs, and documentation.
|
||||||
|
|
||||||
|
## Repository Layout
|
||||||
|
|
||||||
|
Refer to `docs/bootstrap/dirs.md` for the authoritative directory breakdown and follow-up implementation tasks.
|
||||||
|
|
||||||
|
## Getting Started
|
||||||
|
|
||||||
|
1. Review the bootstrap documents under `docs/bootstrap/` to understand stage-specific goals.
|
||||||
|
2. Fill in service-specific READMEs located under `apps/` and `packages/` as the implementations progress.
|
||||||
|
3. Use the provided directory scaffold as the starting point for coding each subsystem.
|
||||||
|
4. Explore the new Python receipt SDK under `packages/py/aitbc-sdk/` for helpers to fetch and verify coordinator receipts (see `docs/run.md` for examples).
|
||||||
|
5. Run `scripts/ci/run_python_tests.sh` (via Poetry) to execute coordinator, SDK, miner-node, and wallet-daemon test suites before submitting changes.
|
||||||
|
6. GitHub Actions (`.github/workflows/python-tests.yml`) automatically runs the same script on pushes and pull requests targeting `main`.
|
||||||
25
apps/blockchain-node/README.md
Normal file
25
apps/blockchain-node/README.md
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
# Blockchain Node
|
||||||
|
|
||||||
|
## Purpose & Scope
|
||||||
|
|
||||||
|
Minimal asset-backed blockchain node that validates compute receipts and mints AIT tokens as described in `docs/bootstrap/blockchain_node.md`.
|
||||||
|
|
||||||
|
## Status
|
||||||
|
|
||||||
|
Scaffolded. Implementation pending per staged roadmap.
|
||||||
|
|
||||||
|
## Devnet Tooling
|
||||||
|
|
||||||
|
- `scripts/make_genesis.py` — Generate a deterministic devnet genesis file (`data/devnet/genesis.json`).
|
||||||
|
- `scripts/keygen.py` — Produce throwaway devnet keypairs (printed or written to disk).
|
||||||
|
- `scripts/devnet_up.sh` — Launch the blockchain node and RPC API with a freshly generated genesis file.
|
||||||
|
|
||||||
|
### Quickstart
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd apps/blockchain-node
|
||||||
|
python scripts/make_genesis.py --force
|
||||||
|
bash scripts/devnet_up.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
The script sets `PYTHONPATH=src` and starts the proposer loop plus the FastAPI app (via `uvicorn`). Press `Ctrl+C` to stop the devnet.
|
||||||
147
apps/blockchain-node/alembic.ini
Normal file
147
apps/blockchain-node/alembic.ini
Normal file
@ -0,0 +1,147 @@
|
|||||||
|
# A generic, single database configuration.
|
||||||
|
|
||||||
|
[alembic]
|
||||||
|
# path to migration scripts.
|
||||||
|
# this is typically a path given in POSIX (e.g. forward slashes)
|
||||||
|
# format, relative to the token %(here)s which refers to the location of this
|
||||||
|
# ini file
|
||||||
|
script_location = %(here)s/migrations
|
||||||
|
|
||||||
|
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
|
||||||
|
# Uncomment the line below if you want the files to be prepended with date and time
|
||||||
|
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
||||||
|
# for all available tokens
|
||||||
|
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
||||||
|
|
||||||
|
# sys.path path, will be prepended to sys.path if present.
|
||||||
|
# defaults to the current working directory. for multiple paths, the path separator
|
||||||
|
# is defined by "path_separator" below.
|
||||||
|
prepend_sys_path = .
|
||||||
|
|
||||||
|
|
||||||
|
# timezone to use when rendering the date within the migration file
|
||||||
|
# as well as the filename.
|
||||||
|
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
|
||||||
|
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
|
||||||
|
# string value is passed to ZoneInfo()
|
||||||
|
# leave blank for localtime
|
||||||
|
# timezone =
|
||||||
|
|
||||||
|
# max length of characters to apply to the "slug" field
|
||||||
|
# truncate_slug_length = 40
|
||||||
|
|
||||||
|
# set to 'true' to run the environment during
|
||||||
|
# the 'revision' command, regardless of autogenerate
|
||||||
|
# revision_environment = false
|
||||||
|
|
||||||
|
# set to 'true' to allow .pyc and .pyo files without
|
||||||
|
# a source .py file to be detected as revisions in the
|
||||||
|
# versions/ directory
|
||||||
|
# sourceless = false
|
||||||
|
|
||||||
|
# version location specification; This defaults
|
||||||
|
# to <script_location>/versions. When using multiple version
|
||||||
|
# directories, initial revisions must be specified with --version-path.
|
||||||
|
# The path separator used here should be the separator specified by "path_separator"
|
||||||
|
# below.
|
||||||
|
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
|
||||||
|
|
||||||
|
# path_separator; This indicates what character is used to split lists of file
|
||||||
|
# paths, including version_locations and prepend_sys_path within configparser
|
||||||
|
# files such as alembic.ini.
|
||||||
|
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
|
||||||
|
# to provide os-dependent path splitting.
|
||||||
|
#
|
||||||
|
# Note that in order to support legacy alembic.ini files, this default does NOT
|
||||||
|
# take place if path_separator is not present in alembic.ini. If this
|
||||||
|
# option is omitted entirely, fallback logic is as follows:
|
||||||
|
#
|
||||||
|
# 1. Parsing of the version_locations option falls back to using the legacy
|
||||||
|
# "version_path_separator" key, which if absent then falls back to the legacy
|
||||||
|
# behavior of splitting on spaces and/or commas.
|
||||||
|
# 2. Parsing of the prepend_sys_path option falls back to the legacy
|
||||||
|
# behavior of splitting on spaces, commas, or colons.
|
||||||
|
#
|
||||||
|
# Valid values for path_separator are:
|
||||||
|
#
|
||||||
|
# path_separator = :
|
||||||
|
# path_separator = ;
|
||||||
|
# path_separator = space
|
||||||
|
# path_separator = newline
|
||||||
|
#
|
||||||
|
# Use os.pathsep. Default configuration used for new projects.
|
||||||
|
path_separator = os
|
||||||
|
|
||||||
|
# set to 'true' to search source files recursively
|
||||||
|
# in each "version_locations" directory
|
||||||
|
# new in Alembic version 1.10
|
||||||
|
# recursive_version_locations = false
|
||||||
|
|
||||||
|
# the output encoding used when revision files
|
||||||
|
# are written from script.py.mako
|
||||||
|
# output_encoding = utf-8
|
||||||
|
|
||||||
|
# database URL. This is consumed by the user-maintained env.py script only.
|
||||||
|
# other means of configuring database URLs may be customized within the env.py
|
||||||
|
# file.
|
||||||
|
sqlalchemy.url = driver://user:pass@localhost/dbname
|
||||||
|
|
||||||
|
|
||||||
|
[post_write_hooks]
|
||||||
|
# post_write_hooks defines scripts or Python functions that are run
|
||||||
|
# on newly generated revision scripts. See the documentation for further
|
||||||
|
# detail and examples
|
||||||
|
|
||||||
|
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||||
|
# hooks = black
|
||||||
|
# black.type = console_scripts
|
||||||
|
# black.entrypoint = black
|
||||||
|
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
||||||
|
|
||||||
|
# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
|
||||||
|
# hooks = ruff
|
||||||
|
# ruff.type = module
|
||||||
|
# ruff.module = ruff
|
||||||
|
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
|
||||||
|
|
||||||
|
# Alternatively, use the exec runner to execute a binary found on your PATH
|
||||||
|
# hooks = ruff
|
||||||
|
# ruff.type = exec
|
||||||
|
# ruff.executable = ruff
|
||||||
|
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
|
||||||
|
|
||||||
|
# Logging configuration. This is also consumed by the user-maintained
|
||||||
|
# env.py script only.
|
||||||
|
[loggers]
|
||||||
|
keys = root,sqlalchemy,alembic
|
||||||
|
|
||||||
|
[handlers]
|
||||||
|
keys = console
|
||||||
|
|
||||||
|
[formatters]
|
||||||
|
keys = generic
|
||||||
|
|
||||||
|
[logger_root]
|
||||||
|
level = WARNING
|
||||||
|
handlers = console
|
||||||
|
qualname =
|
||||||
|
|
||||||
|
[logger_sqlalchemy]
|
||||||
|
level = WARNING
|
||||||
|
handlers =
|
||||||
|
qualname = sqlalchemy.engine
|
||||||
|
|
||||||
|
[logger_alembic]
|
||||||
|
level = INFO
|
||||||
|
handlers =
|
||||||
|
qualname = alembic
|
||||||
|
|
||||||
|
[handler_console]
|
||||||
|
class = StreamHandler
|
||||||
|
args = (sys.stderr,)
|
||||||
|
level = NOTSET
|
||||||
|
formatter = generic
|
||||||
|
|
||||||
|
[formatter_generic]
|
||||||
|
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||||
|
datefmt = %H:%M:%S
|
||||||
BIN
apps/blockchain-node/data/chain.db
Normal file
BIN
apps/blockchain-node/data/chain.db
Normal file
Binary file not shown.
1
apps/blockchain-node/migrations/README
Normal file
1
apps/blockchain-node/migrations/README
Normal file
@ -0,0 +1 @@
|
|||||||
|
Generic single-database configuration.
|
||||||
85
apps/blockchain-node/migrations/env.py
Normal file
85
apps/blockchain-node/migrations/env.py
Normal file
@ -0,0 +1,85 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from logging.config import fileConfig
|
||||||
|
|
||||||
|
from sqlalchemy import engine_from_config
|
||||||
|
from sqlalchemy import pool
|
||||||
|
from sqlmodel import SQLModel
|
||||||
|
|
||||||
|
from alembic import context
|
||||||
|
|
||||||
|
from aitbc_chain.config import settings
|
||||||
|
from aitbc_chain import models # noqa: F401
|
||||||
|
|
||||||
|
# this is the Alembic Config object, which provides
|
||||||
|
# access to the values within the .ini file in use.
|
||||||
|
config = context.config
|
||||||
|
|
||||||
|
# Ensure the database path exists and propagate URL to Alembic config
|
||||||
|
settings.db_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
config.set_main_option("sqlalchemy.url", f"sqlite:///{settings.db_path}")
|
||||||
|
|
||||||
|
# Interpret the config file for Python logging.
|
||||||
|
# This line sets up loggers basically.
|
||||||
|
if config.config_file_name is not None:
|
||||||
|
fileConfig(config.config_file_name)
|
||||||
|
|
||||||
|
# Use SQLModel metadata for autogeneration.
|
||||||
|
target_metadata = SQLModel.metadata
|
||||||
|
|
||||||
|
# other values from the config, defined by the needs of env.py,
|
||||||
|
# can be acquired:
|
||||||
|
# my_important_option = config.get_main_option("my_important_option")
|
||||||
|
# ... etc.
|
||||||
|
|
||||||
|
|
||||||
|
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the Alembic context with only a database URL — no Engine and
    therefore no DBAPI is required. Calls to context.execute() emit the
    generated SQL to the script output instead of a live connection.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||||
|
|
||||||
|
|
||||||
|
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds a real Engine from the [alembic] config section and binds the
    migration context to a live database connection.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||||
28
apps/blockchain-node/migrations/script.py.mako
Normal file
28
apps/blockchain-node/migrations/script.py.mako
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
"""${message}
|
||||||
|
|
||||||
|
Revision ID: ${up_revision}
|
||||||
|
Revises: ${down_revision | comma,n}
|
||||||
|
Create Date: ${create_date}
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
${imports if imports else ""}
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = ${repr(up_revision)}
|
||||||
|
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||||
|
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
"""Upgrade schema."""
|
||||||
|
${upgrades if upgrades else "pass"}
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
"""Downgrade schema."""
|
||||||
|
${downgrades if downgrades else "pass"}
|
||||||
@ -0,0 +1,34 @@
|
|||||||
|
"""add block relationships
|
||||||
|
|
||||||
|
Revision ID: 80bc0020bde2
|
||||||
|
Revises: e31f486f1484
|
||||||
|
Create Date: 2025-09-27 06:02:11.656859
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = '80bc0020bde2'
|
||||||
|
down_revision: Union[str, Sequence[str], None] = 'e31f486f1484'
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Upgrade schema: link receipt and transaction rows to their block."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Both child tables reference block.height via their block_height column.
    for table in ("receipt", "transaction"):
        op.create_foreign_key(None, table, "block", ["block_height"], ["height"])
    # ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Downgrade schema: remove the block foreign keys again."""
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): dropping a constraint with name=None typically fails on
    # most backends because the FK was created unnamed in upgrade() — confirm
    # this downgrade actually runs, or give the constraints explicit names.
    for table in ("transaction", "receipt"):
        op.drop_constraint(None, table, type_="foreignkey")
    # ### end Alembic commands ###
|
||||||
@ -0,0 +1,103 @@
|
|||||||
|
"""baseline
|
||||||
|
|
||||||
|
Revision ID: e31f486f1484
|
||||||
|
Revises:
|
||||||
|
Create Date: 2025-09-27 05:58:27.490151
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "e31f486f1484"
|
||||||
|
down_revision: Union[str, Sequence[str], None] = None
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Upgrade schema."""
    # Baseline schema: chain blocks, transactions, compute receipts, and
    # account balances for the AITBC devnet node.

    # Chain blocks; height and hash are each unique, timestamp is indexed
    # for range queries.
    op.create_table(
        "block",
        sa.Column("id", sa.Integer(), primary_key=True, nullable=False),
        sa.Column("height", sa.Integer(), nullable=False),
        sa.Column("hash", sa.String(), nullable=False),
        sa.Column("parent_hash", sa.String(), nullable=False),
        sa.Column("proposer", sa.String(), nullable=False),
        sa.Column("timestamp", sa.DateTime(), nullable=False),
        sa.Column("tx_count", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("state_root", sa.String(), nullable=True),
    )
    op.create_index("ix_block_height", "block", ["height"], unique=True)
    op.create_index("ix_block_hash", "block", ["hash"], unique=True)
    op.create_index("ix_block_timestamp", "block", ["timestamp"], unique=False)

    # Transactions; block_height is nullable — presumably so entries can
    # exist before inclusion in a block (TODO confirm against node logic).
    op.create_table(
        "transaction",
        sa.Column("id", sa.Integer(), primary_key=True, nullable=False),
        sa.Column("tx_hash", sa.String(), nullable=False),
        sa.Column("block_height", sa.Integer(), nullable=True),
        sa.Column("sender", sa.String(), nullable=False),
        sa.Column("recipient", sa.String(), nullable=False),
        sa.Column("payload", sa.JSON(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
    )
    op.create_index("ix_transaction_tx_hash", "transaction", ["tx_hash"], unique=True)
    op.create_index(
        "ix_transaction_block_height", "transaction", ["block_height"], unique=False
    )
    op.create_index(
        "ix_transaction_created_at", "transaction", ["created_at"], unique=False
    )

    # Compute receipts; signatures and attestations are stored as JSON blobs,
    # receipt_id is globally unique.
    op.create_table(
        "receipt",
        sa.Column("id", sa.Integer(), primary_key=True, nullable=False),
        sa.Column("job_id", sa.String(), nullable=False),
        sa.Column("receipt_id", sa.String(), nullable=False),
        sa.Column("block_height", sa.Integer(), nullable=True),
        sa.Column("payload", sa.JSON(), nullable=False),
        sa.Column("miner_signature", sa.JSON(), nullable=False),
        sa.Column("coordinator_attestations", sa.JSON(), nullable=False),
        sa.Column("minted_amount", sa.Integer(), nullable=True),
        sa.Column("recorded_at", sa.DateTime(), nullable=False),
    )
    op.create_index("ix_receipt_job_id", "receipt", ["job_id"], unique=False)
    op.create_index("ix_receipt_receipt_id", "receipt", ["receipt_id"], unique=True)
    op.create_index("ix_receipt_block_height", "receipt", ["block_height"], unique=False)
    op.create_index("ix_receipt_recorded_at", "receipt", ["recorded_at"], unique=False)

    # Account balances keyed directly by address (no surrogate id).
    op.create_table(
        "account",
        sa.Column("address", sa.String(), nullable=False),
        sa.Column("balance", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("nonce", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("address"),
    )
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Downgrade schema: drop all baseline tables and their indexes."""
    # Reverse of upgrade(): account first, then receipt, transaction, block,
    # dropping each table's indexes before the table itself.
    op.drop_table("account")

    receipt_indexes = (
        "ix_receipt_recorded_at",
        "ix_receipt_block_height",
        "ix_receipt_receipt_id",
        "ix_receipt_job_id",
    )
    for name in receipt_indexes:
        op.drop_index(name, table_name="receipt")
    op.drop_table("receipt")

    transaction_indexes = (
        "ix_transaction_created_at",
        "ix_transaction_block_height",
        "ix_transaction_tx_hash",
    )
    for name in transaction_indexes:
        op.drop_index(name, table_name="transaction")
    op.drop_table("transaction")

    for name in ("ix_block_timestamp", "ix_block_hash", "ix_block_height"):
        op.drop_index(name, table_name="block")
    op.drop_table("block")
|
||||||
1673
apps/blockchain-node/poetry.lock
generated
Normal file
1673
apps/blockchain-node/poetry.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
37
apps/blockchain-node/pyproject.toml
Normal file
37
apps/blockchain-node/pyproject.toml
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
[tool.poetry]
|
||||||
|
name = "aitbc-blockchain-node"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "AITBC blockchain node service"
|
||||||
|
authors = ["AITBC Team"]
|
||||||
|
packages = [
|
||||||
|
{ include = "aitbc_chain", from = "src" }
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.poetry.dependencies]
|
||||||
|
python = "^3.11"
|
||||||
|
fastapi = "^0.111.0"
|
||||||
|
uvicorn = { extras = ["standard"], version = "^0.30.0" }
|
||||||
|
sqlmodel = "^0.0.16"
|
||||||
|
sqlalchemy = "^2.0.30"
|
||||||
|
alembic = "^1.13.1"
|
||||||
|
aiosqlite = "^0.20.0"
|
||||||
|
websockets = "^12.0"
|
||||||
|
pydantic = "^2.7.0"
|
||||||
|
pydantic-settings = "^2.2.1"
|
||||||
|
orjson = "^3.10.0"
|
||||||
|
python-dotenv = "^1.0.1"
|
||||||
|
httpx = "^0.27.0"
|
||||||
|
uvloop = { version = "^0.19.0", optional = true }
|
||||||
|
rich = "^13.7.1"
|
||||||
|
cryptography = "^42.0.5"
|
||||||
|
|
||||||
|
[tool.poetry.extras]
|
||||||
|
uvloop = ["uvloop"]
|
||||||
|
|
||||||
|
[tool.poetry.group.dev.dependencies]
|
||||||
|
pytest = "^8.2.0"
|
||||||
|
pytest-asyncio = "^0.23.0"
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["poetry-core>=1.0.0"]
|
||||||
|
build-backend = "poetry.core.masonry.api"
|
||||||
36
apps/blockchain-node/scripts/devnet_up.sh
Normal file
36
apps/blockchain-node/scripts/devnet_up.sh
Normal file
@ -0,0 +1,36 @@
|
|||||||
|
#!/usr/bin/env bash
# Launch a local devnet: blockchain node, RPC API, and mock coordinator.
# Regenerates the devnet genesis file on every start; Ctrl+C stops everything
# via the EXIT trap.
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
export PYTHONPATH="${ROOT_DIR}/src:${ROOT_DIR}/scripts:${PYTHONPATH:-}"

GENESIS_PATH="${ROOT_DIR}/data/devnet/genesis.json"
python "${ROOT_DIR}/scripts/make_genesis.py" --output "${GENESIS_PATH}" --force

echo "[devnet] Generated genesis at ${GENESIS_PATH}"

declare -a CHILD_PIDS=()
cleanup() {
    # Guard: with `set -u`, expanding an empty array is an "unbound variable"
    # error on bash < 4.4, which would make the EXIT trap itself fail when the
    # script dies before any child process was started.
    if ((${#CHILD_PIDS[@]} == 0)); then
        return
    fi
    for pid in "${CHILD_PIDS[@]}"; do
        if kill -0 "$pid" 2>/dev/null; then
            kill "$pid" 2>/dev/null || true
        fi
    done
}
trap cleanup EXIT

python -m aitbc_chain.main &
CHILD_PIDS+=($!)
echo "[devnet] Blockchain node started (PID ${CHILD_PIDS[-1]})"

# Give the node a moment to come up before starting the HTTP services.
sleep 1

python -m uvicorn aitbc_chain.app:app --host 127.0.0.1 --port 8080 --log-level info &
CHILD_PIDS+=($!)
echo "[devnet] RPC API serving at http://127.0.0.1:8080"

python -m uvicorn mock_coordinator:app --host 127.0.0.1 --port 8090 --log-level info &
CHILD_PIDS+=($!)
echo "[devnet] Mock coordinator serving at http://127.0.0.1:8090"

wait
||||||
46
apps/blockchain-node/scripts/keygen.py
Normal file
46
apps/blockchain-node/scripts/keygen.py
Normal file
@ -0,0 +1,46 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""Generate a pseudo devnet key pair for blockchain components."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import json
|
||||||
|
import secrets
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
def parse_args() -> argparse.Namespace:
    """Parse command-line options for the devnet key generator."""
    cli = argparse.ArgumentParser(description="Generate a devnet key pair")
    cli.add_argument(
        "--output",
        type=Path,
        help="Optional path to write the keypair JSON (prints to stdout if omitted)",
    )
    return cli.parse_args()
|
||||||
|
|
||||||
|
|
||||||
|
def generate_keypair() -> dict:
    """Return a throwaway devnet keypair.

    The keys are random hex strings, NOT a real asymmetric keypair — the
    "public" key is independent of the "private" one. Devnet use only.
    """
    return {
        "private_key": secrets.token_hex(32),
        "public_key": secrets.token_hex(32),
        "address": "ait1" + secrets.token_hex(20),
    }
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
    """Entry point: emit a fresh keypair to stdout or to --output as JSON."""
    options = parse_args()
    payload = json.dumps(generate_keypair(), indent=2)
    if not options.output:
        print(payload)
        return
    # Ensure parent directories exist before writing the key file.
    options.output.parent.mkdir(parents=True, exist_ok=True)
    options.output.write_text(payload + "\n", encoding="utf-8")
    print(f"[keygen] wrote keypair to {options.output}")


if __name__ == "__main__":
    main()
|
||||||
96
apps/blockchain-node/scripts/make_genesis.py
Normal file
96
apps/blockchain-node/scripts/make_genesis.py
Normal file
@ -0,0 +1,96 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""Generate a deterministic devnet genesis file for the blockchain node."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import json
|
||||||
|
import time
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
# Template genesis document. `timestamp` is filled in at generation time, and
# the faucet account / authority set may be overridden via CLI flags.
DEFAULT_GENESIS = {
    "chain_id": "ait-devnet",
    "timestamp": None,  # populated at runtime
    "params": {
        "mint_per_unit": 1000,
        "coordinator_ratio": 0.05,
        "base_fee": 10,
        "fee_per_byte": 1,
    },
    # Single pre-funded faucet account for devnet testing.
    "accounts": [
        {
            "address": "ait1faucet000000000000000000000000000000000",
            "balance": 1_000_000_000,
            "nonce": 0,
        }
    ],
    # Single default block proposer.
    "authorities": [
        {
            "address": "ait1devproposer000000000000000000000000000000",
            "weight": 1,
        }
    ],
}
|
||||||
|
|
||||||
|
|
||||||
|
def parse_args() -> argparse.Namespace:
    """Parse CLI options controlling the generated genesis document."""
    cli = argparse.ArgumentParser(description="Generate devnet genesis data")
    cli.add_argument(
        "--output",
        type=Path,
        default=Path("data/devnet/genesis.json"),
        help="Path to write the generated genesis file (default: data/devnet/genesis.json)",
    )
    cli.add_argument(
        "--force",
        action="store_true",
        help="Overwrite the genesis file if it already exists.",
    )
    cli.add_argument(
        "--faucet-address",
        default="ait1faucet000000000000000000000000000000000",
        help="Address seeded with devnet funds.",
    )
    cli.add_argument(
        "--faucet-balance",
        type=int,
        default=1_000_000_000,
        help="Faucet balance in smallest units.",
    )
    cli.add_argument(
        "--authorities",
        nargs="*",
        default=["ait1devproposer000000000000000000000000000000"],
        help="Authority addresses included in the genesis file.",
    )
    return cli.parse_args()
|
||||||
|
|
||||||
|
|
||||||
|
def build_genesis(args: argparse.Namespace) -> dict:
    """Assemble the genesis document from the template plus CLI overrides."""
    # A JSON round-trip yields an independent deep copy of the template.
    doc = json.loads(json.dumps(DEFAULT_GENESIS))
    doc["timestamp"] = int(time.time())
    faucet = doc["accounts"][0]
    faucet["address"] = args.faucet_address
    faucet["balance"] = args.faucet_balance
    doc["authorities"] = [{"address": addr, "weight": 1} for addr in args.authorities]
    return doc
|
||||||
|
|
||||||
|
|
||||||
|
def write_genesis(path: Path, data: dict, force: bool) -> None:
    """Serialize *data* as pretty-printed, key-sorted JSON at *path*.

    Refuses to clobber an existing file unless *force* is set; creates
    parent directories as needed.
    """
    if path.exists() and not force:
        raise SystemExit(f"Genesis file already exists at {path}. Use --force to overwrite.")
    path.parent.mkdir(parents=True, exist_ok=True)
    serialized = json.dumps(data, indent=2, sort_keys=True)
    path.write_text(serialized + "\n", encoding="utf-8")
    print(f"[genesis] wrote genesis file to {path}")
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
    """CLI entry point: parse arguments, build, and persist the genesis doc."""
    args = parse_args()
    write_genesis(args.output, build_genesis(args), args.force)


if __name__ == "__main__":
    main()
|
||||||
38
apps/blockchain-node/scripts/mock_coordinator.py
Normal file
38
apps/blockchain-node/scripts/mock_coordinator.py
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""Mock coordinator API for devnet testing."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Dict
|
||||||
|
|
||||||
|
from fastapi import FastAPI
|
||||||
|
|
||||||
|
app = FastAPI(title="Mock Coordinator API", version="0.1.0")

# Canned jobs served by the attestation endpoint. Values mix str and int
# (``compute_units`` is an int), so the inner value type is ``object``
# rather than the previous (incorrect) ``str``.
MOCK_JOBS: Dict[str, Dict[str, object]] = {
    "job_1": {"status": "complete", "price": "50000", "compute_units": 2500},
    "job_2": {"status": "complete", "price": "25000", "compute_units": 1200},
}
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/health")
def health() -> Dict[str, str]:
    """Liveness probe used by devnet tooling."""
    return {"status": "ok"}
|
||||||
|
|
||||||
|
|
||||||
|
@app.post("/attest/receipt")
def attest_receipt(payload: Dict[str, str]) -> Dict[str, object]:
    """Attest that a job's receipt exists and was paid.

    Known ``job_id`` values get an all-True attestation plus the job's
    quote; unknown ones get an all-False attestation with an empty quote.
    Return annotation widened to ``object``: the response mixes bools and
    a dict (``quote``), so the previous ``str | bool`` was wrong.
    """
    job_id = payload.get("job_id")
    if job_id in MOCK_JOBS:
        return {
            "exists": True,
            "paid": True,
            "not_double_spent": True,
            "quote": MOCK_JOBS[job_id],
        }
    return {
        "exists": False,
        "paid": False,
        "not_double_spent": False,
        "quote": {},
    }
|
||||||
5
apps/blockchain-node/src/aitbc_chain/__init__.py
Normal file
5
apps/blockchain-node/src/aitbc_chain/__init__.py
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
"""AITBC blockchain node package."""
|
||||||
|
|
||||||
|
from .app import create_app
|
||||||
|
|
||||||
|
__all__ = ["create_app"]
|
||||||
33
apps/blockchain-node/src/aitbc_chain/app.py
Normal file
33
apps/blockchain-node/src/aitbc_chain/app.py
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from contextlib import asynccontextmanager
|
||||||
|
|
||||||
|
from fastapi import APIRouter, FastAPI
|
||||||
|
from fastapi.responses import PlainTextResponse
|
||||||
|
|
||||||
|
from .config import settings
|
||||||
|
from .database import init_db
|
||||||
|
from .metrics import metrics_registry
|
||||||
|
from .rpc.router import router as rpc_router
|
||||||
|
|
||||||
|
|
||||||
|
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: initialize the database before serving.

    Fix: the original never yielded, which makes the async context
    manager invalid (``RuntimeError: generator didn't yield``) and
    breaks application startup.
    """
    init_db()
    yield
|
||||||
|
|
||||||
|
def create_app() -> FastAPI:
    """Build the node's FastAPI application with RPC and metrics routes."""
    application = FastAPI(title="AITBC Blockchain Node", version="0.1.0", lifespan=lifespan)
    application.include_router(rpc_router, prefix="/rpc", tags=["rpc"])

    metrics_router = APIRouter()

    @metrics_router.get(
        "/metrics",
        response_class=PlainTextResponse,
        tags=["metrics"],
        summary="Prometheus metrics",
    )
    async def metrics() -> str:
        # Render the in-process registry in Prometheus text format.
        return metrics_registry.render_prometheus()

    application.include_router(metrics_router)
    return application


app = create_app()
|
||||||
30
apps/blockchain-node/src/aitbc_chain/config.py
Normal file
30
apps/blockchain-node/src/aitbc_chain/config.py
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||||
|
|
||||||
|
|
||||||
|
class ChainSettings(BaseSettings):
    """Node configuration, loaded from the environment / a local ``.env`` file."""

    # Case-insensitive env vars; values may also come from a UTF-8 ".env".
    model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8", case_sensitive=False)

    # Chain identity and SQLite storage location.
    chain_id: str = "ait-devnet"
    db_path: Path = Path("./data/chain.db")

    # JSON-RPC HTTP bind address (loopback only by default).
    rpc_bind_host: str = "127.0.0.1"
    rpc_bind_port: int = 8080

    # Peer-to-peer listener (all interfaces).
    p2p_bind_host: str = "0.0.0.0"
    p2p_bind_port: int = 7070

    # Identity recorded on proposed blocks; signing key optional on devnet.
    proposer_id: str = "ait-devnet-proposer"
    proposer_key: Optional[str] = None

    # Economic parameters: mint per compute unit and coordinator share.
    mint_per_unit: int = 1000
    coordinator_ratio: float = 0.05

    # Target spacing between proposed blocks, in seconds.
    block_time_seconds: int = 2


# Module-wide settings singleton read at import time.
settings = ChainSettings()
|
||||||
@ -0,0 +1,5 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from .poa import PoAProposer, ProposerConfig
|
||||||
|
|
||||||
|
__all__ = ["PoAProposer", "ProposerConfig"]
|
||||||
140
apps/blockchain-node/src/aitbc_chain/consensus/poa.py
Normal file
140
apps/blockchain-node/src/aitbc_chain/consensus/poa.py
Normal file
@ -0,0 +1,140 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import hashlib
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Callable, ContextManager, Optional
|
||||||
|
|
||||||
|
from sqlmodel import Session, select
|
||||||
|
|
||||||
|
from ..logging import get_logger
|
||||||
|
from ..metrics import metrics_registry
|
||||||
|
from ..models import Block
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class ProposerConfig:
    """Static parameters for the PoA block proposer loop."""

    chain_id: str          # chain identifier mixed into every block hash
    proposer_id: str       # identity recorded as each block's proposer
    interval_seconds: int  # target spacing between consecutive blocks
|
||||||
|
|
||||||
|
|
||||||
|
class PoAProposer:
    """Single-authority Proof-of-Authority block producer.

    Runs an asyncio loop that seals an (empty) block roughly every
    ``config.interval_seconds``, persisting rows through the injected
    session factory so storage stays testable.
    """

    def __init__(
        self,
        *,
        config: ProposerConfig,
        session_factory: Callable[[], ContextManager[Session]],
    ) -> None:
        self._config = config
        self._session_factory = session_factory
        self._logger = get_logger(__name__)
        self._stop_event = asyncio.Event()
        # Handle of the running loop task; None while stopped.
        self._task: Optional[asyncio.Task[None]] = None

    async def start(self) -> None:
        """Ensure a genesis block exists and launch the proposer task.

        Idempotent: calling start() while the loop runs is a no-op.
        """
        if self._task is not None:
            return
        self._logger.info("Starting PoA proposer loop", extra={"interval": self._config.interval_seconds})
        self._ensure_genesis_block()
        self._stop_event.clear()
        self._task = asyncio.create_task(self._run_loop(), name="poa-proposer-loop")

    async def stop(self) -> None:
        """Signal the loop to exit and wait for the task to finish."""
        if self._task is None:
            return
        self._logger.info("Stopping PoA proposer loop")
        self._stop_event.set()
        await self._task
        self._task = None

    async def _run_loop(self) -> None:
        # Propose until stop() fires; individual proposal failures are
        # logged and the loop continues.
        while not self._stop_event.is_set():
            await self._wait_until_next_slot()
            if self._stop_event.is_set():
                break
            try:
                self._propose_block()
            except Exception as exc:  # pragma: no cover - defensive logging
                self._logger.exception("Failed to propose block", extra={"error": str(exc)})

    async def _wait_until_next_slot(self) -> None:
        # Sleep until interval_seconds have elapsed since the head block's
        # timestamp, waking early on stop(). NOTE(review): compares naive
        # datetime.utcnow() values -- assumes all stored timestamps are
        # naive UTC; confirm before introducing timezone-aware datetimes.
        head = self._fetch_chain_head()
        if head is None:
            return
        now = datetime.utcnow()
        elapsed = (now - head.timestamp).total_seconds()
        sleep_for = max(self._config.interval_seconds - elapsed, 0)
        if sleep_for <= 0:
            return
        try:
            await asyncio.wait_for(self._stop_event.wait(), timeout=sleep_for)
        except asyncio.TimeoutError:
            return

    def _propose_block(self) -> None:
        """Seal the next (currently transaction-less) block on the head."""
        with self._session_factory() as session:
            head = session.exec(select(Block).order_by(Block.height.desc()).limit(1)).first()
            next_height = 0
            parent_hash = "0x00"  # sentinel parent for a fresh chain
            if head is not None:
                next_height = head.height + 1
                parent_hash = head.hash

            timestamp = datetime.utcnow()
            block_hash = self._compute_block_hash(next_height, parent_hash, timestamp)

            block = Block(
                height=next_height,
                hash=block_hash,
                parent_hash=parent_hash,
                proposer=self._config.proposer_id,
                timestamp=timestamp,
                tx_count=0,  # devnet blocks carry no transactions yet
                state_root=None,
            )
            session.add(block)
            session.commit()

            metrics_registry.increment("blocks_proposed_total")
            metrics_registry.set_gauge("chain_head_height", float(next_height))

            self._logger.info(
                "Proposed block",
                extra={
                    "height": next_height,
                    "hash": block_hash,
                    "parent_hash": parent_hash,
                    "timestamp": timestamp.isoformat(),
                },
            )

    def _ensure_genesis_block(self) -> None:
        """Insert a height-0 block if (and only if) the chain is empty."""
        with self._session_factory() as session:
            head = session.exec(select(Block).order_by(Block.height.desc()).limit(1)).first()
            if head is not None:
                return

            timestamp = datetime.utcnow()
            genesis_hash = self._compute_block_hash(0, "0x00", timestamp)
            genesis = Block(
                height=0,
                hash=genesis_hash,
                parent_hash="0x00",
                proposer=self._config.proposer_id,
                timestamp=timestamp,
                tx_count=0,
                state_root=None,
            )
            session.add(genesis)
            session.commit()
            self._logger.info("Created genesis block", extra={"hash": genesis_hash})

    def _fetch_chain_head(self) -> Optional[Block]:
        """Return the highest block, or None when the chain is empty."""
        with self._session_factory() as session:
            return session.exec(select(Block).order_by(Block.height.desc()).limit(1)).first()

    def _compute_block_hash(self, height: int, parent_hash: str, timestamp: datetime) -> str:
        # hash = sha256("chain_id|height|parent_hash|iso_timestamp")
        payload = f"{self._config.chain_id}|{height}|{parent_hash}|{timestamp.isoformat()}".encode()
        return "0x" + hashlib.sha256(payload).hexdigest()
|
||||||
20
apps/blockchain-node/src/aitbc_chain/database.py
Normal file
20
apps/blockchain-node/src/aitbc_chain/database.py
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from contextlib import contextmanager
|
||||||
|
|
||||||
|
from sqlmodel import Session, SQLModel, create_engine
|
||||||
|
|
||||||
|
from .config import settings
|
||||||
|
|
||||||
|
# Module-wide SQLite engine; the db file's directory is created lazily by init_db().
_engine = create_engine(f"sqlite:///{settings.db_path}", echo=False)
|
||||||
|
|
||||||
|
|
||||||
|
def init_db() -> None:
    """Create the SQLite file's parent directory and all SQLModel tables."""
    settings.db_path.parent.mkdir(parents=True, exist_ok=True)
    SQLModel.metadata.create_all(_engine)
|
||||||
|
|
||||||
|
|
||||||
|
@contextmanager
def session_scope() -> Session:
    """Yield a SQLModel session bound to the module engine.

    NOTE(review): as a @contextmanager generator this should be annotated
    ``Iterator[Session]``; the current annotation describes the yielded
    value, not the function's return type -- fix when typing imports allow.
    """
    with Session(_engine) as session:
        yield session
|
||||||
71
apps/blockchain-node/src/aitbc_chain/logging.py
Normal file
71
apps/blockchain-node/src/aitbc_chain/logging.py
Normal file
@ -0,0 +1,71 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class JsonFormatter(logging.Formatter):
    """Render log records as single-line JSON documents.

    Standard LogRecord attributes are filtered out via ``RESERVED`` so
    only caller-supplied ``extra`` fields are copied into the payload.
    """

    RESERVED = {
        "name", "msg", "args", "levelname", "levelno", "pathname",
        "filename", "module", "exc_info", "exc_text", "stack_info",
        "lineno", "funcName", "created", "msecs", "relativeCreated",
        "thread", "threadName", "process", "processName",
    }

    def format(self, record: logging.LogRecord) -> str:  # type: ignore[override]
        doc: dict[str, Any] = {
            "timestamp": datetime.utcnow().isoformat() + "Z",
            "level": record.levelname,
            "logger": record.name,
            "message": record.getMessage(),
        }

        # Copy user-supplied "extra" fields, skipping stdlib internals.
        extras = {
            key: value
            for key, value in record.__dict__.items()
            if key not in self.RESERVED and not key.startswith("_")
        }
        doc.update(extras)

        if record.exc_info:
            doc["exc_info"] = self.formatException(record.exc_info)
        if record.stack_info:
            doc["stack"] = record.stack_info

        # default=str keeps non-JSON-native extras (datetimes, paths) loggable.
        return json.dumps(doc, default=str)
|
||||||
|
|
||||||
|
|
||||||
|
def configure_logging(level: Optional[str] = None) -> None:
    """Install a JSON handler on the root logger (idempotent).

    If any handler already exists on the root logger, the existing
    configuration is left untouched.
    """
    log_level = getattr(logging, (level or "INFO").upper(), logging.INFO)
    root = logging.getLogger()
    if root.handlers:
        return

    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(JsonFormatter())
    root.addHandler(stream_handler)
    root.setLevel(log_level)
|
||||||
|
|
||||||
|
|
||||||
|
def get_logger(name: str) -> logging.Logger:
    """Return a named logger, configuring JSON logging on first use."""
    root_configured = bool(logging.getLogger().handlers)
    if not root_configured:
        configure_logging()
    return logging.getLogger(name)
|
||||||
72
apps/blockchain-node/src/aitbc_chain/main.py
Normal file
72
apps/blockchain-node/src/aitbc_chain/main.py
Normal file
@ -0,0 +1,72 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
from contextlib import asynccontextmanager
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from .config import settings
|
||||||
|
from .consensus import PoAProposer, ProposerConfig
|
||||||
|
from .database import init_db, session_scope
|
||||||
|
from .logging import get_logger
|
||||||
|
|
||||||
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class BlockchainNode:
    """Top-level devnet node: owns the database and the PoA proposer loop."""

    def __init__(self) -> None:
        self._stop_event = asyncio.Event()
        # Running proposer instance; None while the node is stopped.
        self._proposer: Optional[PoAProposer] = None

    async def start(self) -> None:
        """Initialize storage, launch the proposer, and block until stop()."""
        logger.info("Starting blockchain node", extra={"chain_id": settings.chain_id})
        init_db()
        self._start_proposer()
        try:
            await self._stop_event.wait()
        finally:
            await self._shutdown()

    async def stop(self) -> None:
        """Signal the run loop to exit and shut the proposer down."""
        logger.info("Stopping blockchain node")
        self._stop_event.set()
        await self._shutdown()

    def _start_proposer(self) -> None:
        # Idempotent: a second call while a proposer exists is a no-op.
        if self._proposer is not None:
            return

        proposer_config = ProposerConfig(
            chain_id=settings.chain_id,
            proposer_id=settings.proposer_id,
            interval_seconds=settings.block_time_seconds,
        )
        self._proposer = PoAProposer(config=proposer_config, session_factory=session_scope)
        # NOTE(review): fire-and-forget task -- no reference is retained, so
        # the task may be garbage-collected and its exceptions go
        # unobserved; consider storing the task handle. TODO confirm intent.
        asyncio.create_task(self._proposer.start())

    async def _shutdown(self) -> None:
        # Safe to call repeatedly (stop() and start()'s finally both do).
        if self._proposer is None:
            return
        await self._proposer.stop()
        self._proposer = None
|
||||||
|
|
||||||
|
|
||||||
|
@asynccontextmanager
async def node_app():
    """Async context manager yielding a BlockchainNode; stops it on exit.

    Fix: the previous return annotation referenced
    ``asyncio.AbstractAsyncContextManager``, which does not exist (the ABC
    lives in :mod:`contextlib`); since the annotation was both wrong and
    unimportable here, it has been removed.
    """
    node = BlockchainNode()
    try:
        yield node
    finally:
        await node.stop()
|
||||||
|
|
||||||
|
|
||||||
|
def run() -> None:
    """Synchronous entry point: run the node until it is stopped."""
    asyncio.run(_run())


async def _run() -> None:
    # node_app() guarantees node.stop() runs even if start() raises.
    async with node_app() as node:
        await node.start()


if __name__ == "__main__":  # pragma: no cover
    run()
|
||||||
47
apps/blockchain-node/src/aitbc_chain/mempool.py
Normal file
47
apps/blockchain-node/src/aitbc_chain/mempool.py
Normal file
@ -0,0 +1,47 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import hashlib
|
||||||
|
import json
|
||||||
|
import time
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from threading import Lock
|
||||||
|
from typing import Any, Dict, List
|
||||||
|
|
||||||
|
from .metrics import metrics_registry
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class PendingTransaction:
    """Immutable mempool entry for a not-yet-included transaction."""

    tx_hash: str            # canonical sha256 hash ("0x"-prefixed)
    content: Dict[str, Any]  # original transaction payload as submitted
    received_at: float       # unix timestamp of mempool admission
|
||||||
|
|
||||||
|
|
||||||
|
class InMemoryMempool:
    """Thread-safe, process-local store of pending transactions."""

    def __init__(self) -> None:
        self._lock = Lock()
        self._transactions: Dict[str, PendingTransaction] = {}

    def add(self, tx: Dict[str, Any]) -> str:
        """Insert *tx* and return its canonical hash.

        Re-submitting identical content overwrites the existing entry
        (same hash), so admission is idempotent on content.
        """
        digest = self._compute_hash(tx)
        pending = PendingTransaction(tx_hash=digest, content=tx, received_at=time.time())
        with self._lock:
            self._transactions[digest] = pending
            metrics_registry.set_gauge("mempool_size", float(len(self._transactions)))
        return digest

    def list_transactions(self) -> List[PendingTransaction]:
        """Return a snapshot of pending transactions (insertion order)."""
        with self._lock:
            return list(self._transactions.values())

    def _compute_hash(self, tx: Dict[str, Any]) -> str:
        # Canonical JSON (sorted keys, compact separators) keeps the hash
        # independent of the caller's key order.
        canonical = json.dumps(tx, sort_keys=True, separators=(",", ":")).encode()
        return "0x" + hashlib.sha256(canonical).hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
# Process-wide singleton shared by the RPC layer.
_MEMPOOL = InMemoryMempool()


def get_mempool() -> InMemoryMempool:
    """Return the process-wide mempool singleton."""
    return _MEMPOOL
|
||||||
40
apps/blockchain-node/src/aitbc_chain/metrics.py
Normal file
40
apps/blockchain-node/src/aitbc_chain/metrics.py
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from threading import Lock
|
||||||
|
from typing import Dict
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class MetricValue:
    """Named metric sample. NOTE(review): appears unused in this module --
    confirm external callers before removing."""

    name: str
    value: float
|
||||||
|
|
||||||
|
|
||||||
|
class MetricsRegistry:
    """Minimal thread-safe counter/gauge registry with Prometheus output."""

    def __init__(self) -> None:
        self._counters: Dict[str, float] = {}
        self._gauges: Dict[str, float] = {}
        self._lock = Lock()

    def increment(self, name: str, amount: float = 1.0) -> None:
        """Add *amount* to counter *name*, creating it at 0.0 if missing."""
        with self._lock:
            current = self._counters.get(name, 0.0)
            self._counters[name] = current + amount

    def set_gauge(self, name: str, value: float) -> None:
        """Overwrite gauge *name* with *value*."""
        with self._lock:
            self._gauges[name] = value

    def render_prometheus(self) -> str:
        """Render all metrics in Prometheus text exposition format.

        Counters are emitted before gauges, each group sorted by name;
        the output always ends with a trailing newline.
        """
        with self._lock:
            parts: list[str] = []
            for metric_name in sorted(self._counters):
                parts.append(f"# TYPE {metric_name} counter")
                parts.append(f"{metric_name} {self._counters[metric_name]}")
            for metric_name in sorted(self._gauges):
                parts.append(f"# TYPE {metric_name} gauge")
                parts.append(f"{metric_name} {self._gauges[metric_name]}")
            return "\n".join(parts) + "\n"


# Shared registry used across the node process.
metrics_registry = MetricsRegistry()
|
||||||
116
apps/blockchain-node/src/aitbc_chain/models.py
Normal file
116
apps/blockchain-node/src/aitbc_chain/models.py
Normal file
@ -0,0 +1,116 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
import re
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
from pydantic import field_validator
|
||||||
|
from sqlalchemy import Column
|
||||||
|
from sqlalchemy.types import JSON
|
||||||
|
from sqlmodel import Field, Relationship, SQLModel
|
||||||
|
|
||||||
|
_HEX_PATTERN = re.compile(r"^(0x)?[0-9a-fA-F]+$")
|
||||||
|
|
||||||
|
|
||||||
|
def _validate_hex(value: str, field_name: str) -> str:
|
||||||
|
if not _HEX_PATTERN.fullmatch(value):
|
||||||
|
raise ValueError(f"{field_name} must be a hex-encoded string")
|
||||||
|
return value.lower()
|
||||||
|
|
||||||
|
|
||||||
|
def _validate_optional_hex(value: Optional[str], field_name: str) -> Optional[str]:
|
||||||
|
if value is None:
|
||||||
|
return value
|
||||||
|
return _validate_hex(value, field_name)
|
||||||
|
|
||||||
|
|
||||||
|
class Block(SQLModel, table=True):
    """Sealed block header row; height and hash are both unique indexes."""

    id: Optional[int] = Field(default=None, primary_key=True)
    height: int = Field(index=True, unique=True)
    hash: str = Field(index=True, unique=True)
    # Hash of the previous block ("0x00" for genesis).
    parent_hash: str
    # Identity of the PoA proposer that sealed this block.
    proposer: str
    timestamp: datetime = Field(default_factory=datetime.utcnow, index=True)
    tx_count: int = 0
    # Optional state commitment; currently always None on devnet.
    state_root: Optional[str] = None

    transactions: List["Transaction"] = Relationship(back_populates="block")
    receipts: List["Receipt"] = Relationship(back_populates="block")

    @field_validator("hash", mode="before")
    @classmethod
    def _hash_is_hex(cls, value: str) -> str:
        # Rejects non-hex input and normalizes to lowercase.
        return _validate_hex(value, "Block.hash")

    @field_validator("parent_hash", mode="before")
    @classmethod
    def _parent_hash_is_hex(cls, value: str) -> str:
        return _validate_hex(value, "Block.parent_hash")

    @field_validator("state_root", mode="before")
    @classmethod
    def _state_root_is_hex(cls, value: Optional[str]) -> Optional[str]:
        # None is allowed; any provided value must be hex.
        return _validate_optional_hex(value, "Block.state_root")
|
||||||
|
|
||||||
|
|
||||||
|
class Transaction(SQLModel, table=True):
    """Persisted transaction; linked to its including block by height."""

    id: Optional[int] = Field(default=None, primary_key=True)
    tx_hash: str = Field(index=True, unique=True)
    # Height of the containing block; None while still pending.
    block_height: Optional[int] = Field(
        default=None,
        index=True,
        foreign_key="block.height",
    )
    sender: str
    recipient: str
    # Arbitrary JSON payload, stored as a JSON column.
    payload: dict = Field(
        default_factory=dict,
        sa_column=Column(JSON, nullable=False),
    )
    created_at: datetime = Field(default_factory=datetime.utcnow, index=True)

    block: Optional[Block] = Relationship(back_populates="transactions")

    @field_validator("tx_hash", mode="before")
    @classmethod
    def _tx_hash_is_hex(cls, value: str) -> str:
        # Rejects non-hex input and normalizes to lowercase.
        return _validate_hex(value, "Transaction.tx_hash")
|
||||||
|
|
||||||
|
|
||||||
|
class Receipt(SQLModel, table=True):
    """Compute-job receipt recorded on chain, with signatures/attestations."""

    id: Optional[int] = Field(default=None, primary_key=True)
    job_id: str = Field(index=True)
    receipt_id: str = Field(index=True, unique=True)
    # Height of the block that recorded this receipt; None until included.
    block_height: Optional[int] = Field(
        default=None,
        index=True,
        foreign_key="block.height",
    )
    # Raw receipt body as submitted (JSON column).
    payload: dict = Field(
        default_factory=dict,
        sa_column=Column(JSON, nullable=False),
    )
    # Miner's signature over the receipt (JSON column).
    miner_signature: dict = Field(
        default_factory=dict,
        sa_column=Column(JSON, nullable=False),
    )
    # Zero or more coordinator attestations (JSON column).
    coordinator_attestations: list[dict] = Field(
        default_factory=list,
        sa_column=Column(JSON, nullable=False),
    )
    # Amount minted for this receipt; None until settlement.
    minted_amount: Optional[int] = None
    recorded_at: datetime = Field(default_factory=datetime.utcnow, index=True)

    block: Optional[Block] = Relationship(back_populates="receipts")

    @field_validator("receipt_id", mode="before")
    @classmethod
    def _receipt_id_is_hex(cls, value: str) -> str:
        # Rejects non-hex input and normalizes to lowercase.
        return _validate_hex(value, "Receipt.receipt_id")
|
||||||
|
|
||||||
|
|
||||||
|
class Account(SQLModel, table=True):
    """Account state keyed by address: balance plus replay-protection nonce."""

    address: str = Field(primary_key=True)
    balance: int = 0
    nonce: int = 0
    updated_at: datetime = Field(default_factory=datetime.utcnow)
|
||||||
184
apps/blockchain-node/src/aitbc_chain/rpc/router.py
Normal file
184
apps/blockchain-node/src/aitbc_chain/rpc/router.py
Normal file
@ -0,0 +1,184 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
from fastapi import APIRouter, HTTPException, status
|
||||||
|
from pydantic import BaseModel, Field, model_validator
|
||||||
|
from sqlmodel import select
|
||||||
|
|
||||||
|
from ..database import session_scope
|
||||||
|
from ..mempool import get_mempool
|
||||||
|
from ..metrics import metrics_registry
|
||||||
|
from ..models import Account, Block, Receipt, Transaction
|
||||||
|
|
||||||
|
# All chain RPC endpoints; mounted under /rpc by create_app().
router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
def _serialize_receipt(receipt: Receipt) -> Dict[str, Any]:
    """Convert a Receipt row into a JSON-safe response dict."""
    serialized = {
        "receipt_id": receipt.receipt_id,
        "job_id": receipt.job_id,
        "payload": receipt.payload,
        "miner_signature": receipt.miner_signature,
        "coordinator_attestations": receipt.coordinator_attestations,
        "minted_amount": receipt.minted_amount,
        # datetime is not JSON-native; emit ISO-8601.
        "recorded_at": receipt.recorded_at.isoformat(),
    }
    return serialized
|
||||||
|
|
||||||
|
|
||||||
|
class TransactionRequest(BaseModel):
    """Incoming transaction envelope accepted by /sendTx."""

    type: str = Field(description="Transaction type, e.g. TRANSFER or RECEIPT_CLAIM")
    sender: str
    nonce: int
    fee: int = Field(ge=0)
    payload: Dict[str, Any]
    sig: Optional[str] = Field(default=None, description="Signature payload")

    @model_validator(mode="after")
    def normalize_type(self) -> "TransactionRequest":  # type: ignore[override]
        """Upper-case ``type`` and reject anything but TRANSFER/RECEIPT_CLAIM."""
        normalized = self.type.upper()
        if normalized not in {"TRANSFER", "RECEIPT_CLAIM"}:
            raise ValueError(f"unsupported transaction type: {self.type}")
        self.type = normalized
        return self
|
||||||
|
|
||||||
|
|
||||||
|
class ReceiptSubmissionRequest(BaseModel):
    """Body for /submitReceipt; wrapped into a RECEIPT_CLAIM transaction."""

    sender: str
    nonce: int
    fee: int = Field(ge=0)
    payload: Dict[str, Any]
    sig: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class EstimateFeeRequest(BaseModel):
    """Body for /estimateFee; type defaults to TRANSFER when omitted."""

    type: Optional[str] = None
    payload: Dict[str, Any] = Field(default_factory=dict)
|
||||||
|
|
||||||
|
|
||||||
|
class MintFaucetRequest(BaseModel):
    """Body for the devnet-only /admin/mintFaucet endpoint."""

    address: str
    amount: int = Field(gt=0)  # must credit a strictly positive amount
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/head", summary="Get current chain head")
async def get_head() -> Dict[str, Any]:
    """Return height/hash/timestamp/tx_count of the latest block (404 if none)."""
    with session_scope() as session:
        head = session.exec(select(Block).order_by(Block.height.desc()).limit(1)).first()
        if head is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="no blocks yet")
        return {
            "height": head.height,
            "hash": head.hash,
            "timestamp": head.timestamp.isoformat(),
            "tx_count": head.tx_count,
        }
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/blocks/{height}", summary="Get block by height")
async def get_block(height: int) -> Dict[str, Any]:
    """Look up a block by height; 404 when no such block exists."""
    with session_scope() as session:
        row = session.exec(select(Block).where(Block.height == height)).first()
        if row is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="block not found")
        return {
            "height": row.height,
            "hash": row.hash,
            "parent_hash": row.parent_hash,
            "timestamp": row.timestamp.isoformat(),
            "tx_count": row.tx_count,
            "state_root": row.state_root,
        }
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/tx/{tx_hash}", summary="Get transaction by hash")
async def get_transaction(tx_hash: str) -> Dict[str, Any]:
    """Look up a persisted transaction by hash; 404 when unknown."""
    with session_scope() as session:
        row = session.exec(select(Transaction).where(Transaction.tx_hash == tx_hash)).first()
        if row is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="transaction not found")
        return {
            "tx_hash": row.tx_hash,
            "block_height": row.block_height,
            "sender": row.sender,
            "recipient": row.recipient,
            "payload": row.payload,
            "created_at": row.created_at.isoformat(),
        }
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/receipts/{receipt_id}", summary="Get receipt by ID")
async def get_receipt(receipt_id: str) -> Dict[str, Any]:
    """Look up a receipt by its unique ID; 404 when unknown."""
    with session_scope() as session:
        row = session.exec(select(Receipt).where(Receipt.receipt_id == receipt_id)).first()
        if row is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="receipt not found")
        return _serialize_receipt(row)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/getBalance/{address}", summary="Get account balance")
async def get_balance(address: str) -> Dict[str, Any]:
    """Return balance/nonce for *address*.

    Unknown addresses are reported as zeroed rather than 404 so clients
    can poll fresh addresses.
    """
    with session_scope() as session:
        account = session.get(Account, address)
        if account is None:
            return {"address": address, "balance": 0, "nonce": 0}
        return {
            "address": account.address,
            "balance": account.balance,
            "nonce": account.nonce,
            "updated_at": account.updated_at.isoformat(),
        }
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/sendTx", summary="Submit a new transaction")
async def send_transaction(request: TransactionRequest) -> Dict[str, Any]:
    """Queue a validated transaction in the mempool and return its hash."""
    tx_hash = get_mempool().add(request.model_dump())
    metrics_registry.increment("rpc_send_tx_total")
    return {"tx_hash": tx_hash}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/submitReceipt", summary="Submit receipt claim transaction")
async def submit_receipt(request: ReceiptSubmissionRequest) -> Dict[str, Any]:
    """Wrap a receipt submission as a RECEIPT_CLAIM tx and enqueue it."""
    claim = {
        "type": "RECEIPT_CLAIM",
        "sender": request.sender,
        "nonce": request.nonce,
        "fee": request.fee,
        "payload": request.payload,
        "sig": request.sig,
    }
    tx_request = TransactionRequest.model_validate(claim)
    metrics_registry.increment("rpc_submit_receipt_total")
    # Delegate to the ordinary transaction path for hashing and queuing.
    return await send_transaction(tx_request)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/estimateFee", summary="Estimate transaction fee")
async def estimate_fee(request: EstimateFeeRequest) -> Dict[str, Any]:
    """Flat-rate model: base fee plus one unit per canonical-payload byte."""
    base_fee = 10
    per_byte = 1
    canonical = json.dumps(request.payload, sort_keys=True, separators=(",", ":"))
    payload_bytes = len(canonical.encode())
    return {
        "type": (request.type or "TRANSFER").upper(),
        "base_fee": base_fee,
        "payload_bytes": payload_bytes,
        "estimated_fee": base_fee + per_byte * payload_bytes,
    }
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/admin/mintFaucet", summary="Mint devnet funds to an address")
async def mint_faucet(request: MintFaucetRequest) -> Dict[str, Any]:
    """Credit *amount* to *address*, creating the account on first use.

    Devnet-only admin endpoint; no authentication is performed here.
    """
    with session_scope() as session:
        account = session.get(Account, request.address)
        if account is None:
            account = Account(address=request.address, balance=request.amount)
            session.add(account)
        else:
            account.balance += request.amount
        session.commit()
        # Read the balance while the session is still open: commit may have
        # expired the instance's attributes, forcing a refresh on access.
        updated_balance = account.balance
    return {"address": request.address, "balance": updated_balance}
|
||||||
9
apps/client-web/README.md
Normal file
9
apps/client-web/README.md
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
# Client Web
|
||||||
|
|
||||||
|
## Purpose & Scope
|
||||||
|
|
||||||
|
Front-end application that allows users to submit compute jobs, monitor status, and interact with AITBC services. See `docs/bootstrap/dirs.md` and `docs/bootstrap/examples.md` for guidance.
|
||||||
|
|
||||||
|
## Development Setup
|
||||||
|
|
||||||
|
Implementation pending. Recommended stack: lightweight web framework (per bootstrap doc) without heavy front-end frameworks.
|
||||||
34
apps/coordinator-api/README.md
Normal file
34
apps/coordinator-api/README.md
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
# Coordinator API
|
||||||
|
|
||||||
|
## Purpose & Scope
|
||||||
|
|
||||||
|
FastAPI service that accepts client compute jobs, matches miners, and tracks job lifecycle for the AITBC network.
|
||||||
|
|
||||||
|
## Development Setup
|
||||||
|
|
||||||
|
1. Create a virtual environment in `apps/coordinator-api/.venv`.
|
||||||
|
2. Install dependencies listed in `pyproject.toml` once added.
|
||||||
|
3. Run the FastAPI app via `uvicorn app.main:app --reload`.
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
Expects environment variables defined in `.env` (see `docs/bootstrap/coordinator_api.md`).
|
||||||
|
|
||||||
|
### Signed receipts (optional)
|
||||||
|
|
||||||
|
- Generate an Ed25519 key:
|
||||||
|
```bash
|
||||||
|
python - <<'PY'
|
||||||
|
from nacl.signing import SigningKey
|
||||||
|
sk = SigningKey.generate()
|
||||||
|
print(sk.encode().hex())
|
||||||
|
PY
|
||||||
|
```
|
||||||
|
- Set `RECEIPT_SIGNING_KEY_HEX` in the `.env` file to the printed hex string to enable signed receipts returned by `/v1/miners/{job_id}/result` and retrievable via `/v1/jobs/{job_id}/receipt`.
|
||||||
|
- Receipt history is available at `/v1/jobs/{job_id}/receipts` (requires client API key) and returns all stored signed payloads.
|
||||||
|
- To enable coordinator attestations, set `RECEIPT_ATTESTATION_KEY_HEX` to a separate Ed25519 private key; responses include an `attestations` array alongside the miner signature.
|
||||||
|
- Clients can verify `signature` objects using the `aitbc_crypto` package (see `protocols/receipts/spec.md`).
|
||||||
|
|
||||||
|
## Systemd
|
||||||
|
|
||||||
|
Service name: `aitbc-coordinator-api` (to be defined under `configs/systemd/`).
|
||||||
33
apps/coordinator-api/pyproject.toml
Normal file
33
apps/coordinator-api/pyproject.toml
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
[tool.poetry]
|
||||||
|
name = "aitbc-coordinator-api"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "AITBC Coordinator API service"
|
||||||
|
authors = ["AITBC Team"]
|
||||||
|
packages = [
|
||||||
|
{ include = "app", from = "src" }
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.poetry.dependencies]
|
||||||
|
python = "^3.11"
|
||||||
|
fastapi = "^0.111.0"
|
||||||
|
uvicorn = { extras = ["standard"], version = "^0.30.0" }
|
||||||
|
pydantic = "^2.7.0"
|
||||||
|
pydantic-settings = "^2.2.1"
|
||||||
|
sqlalchemy = "^2.0.30"
|
||||||
|
aiosqlite = "^0.20.0"
|
||||||
|
sqlmodel = "^0.0.16"
|
||||||
|
httpx = "^0.27.0"
|
||||||
|
python-dotenv = "^1.0.1"
|
||||||
|
slowapi = "^0.1.8"
|
||||||
|
orjson = "^3.10.0"
|
||||||
|
gunicorn = "^22.0.0"
|
||||||
|
aitbc-crypto = {path = "../../packages/py/aitbc-crypto"}
|
||||||
|
|
||||||
|
[tool.poetry.group.dev.dependencies]
|
||||||
|
pytest = "^8.2.0"
|
||||||
|
pytest-asyncio = "^0.23.0"
|
||||||
|
httpx = {extras=["cli"], version="^0.27.0"}
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["poetry-core>=1.0.0"]
|
||||||
|
build-backend = "poetry.core.masonry.api"
|
||||||
1
apps/coordinator-api/src/app/__init__.py
Normal file
1
apps/coordinator-api/src/app/__init__.py
Normal file
@ -0,0 +1 @@
|
|||||||
|
"""AITBC Coordinator API package."""
|
||||||
32
apps/coordinator-api/src/app/config.py
Normal file
32
apps/coordinator-api/src/app/config.py
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
|
||||||
|
class Settings(BaseSettings):
    """Coordinator API configuration, loaded from the environment / `.env`."""

    model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8", case_sensitive=False)

    # Runtime environment label and bind address/port.
    app_env: str = "dev"
    app_host: str = "127.0.0.1"
    app_port: int = 8011

    database_url: str = "sqlite:///./coordinator.db"

    # Static API keys per role; the defaults are dev placeholders only.
    client_api_keys: List[str] = ["client_dev_key_1"]
    miner_api_keys: List[str] = ["miner_dev_key_1"]
    admin_api_keys: List[str] = ["admin_dev_key_1"]

    hmac_secret: Optional[str] = None
    # CORS allow-list; the wide-open default is appropriate for dev only.
    allow_origins: List[str] = ["*"]

    # Job lifetime and miner heartbeat timing, in seconds.
    job_ttl_seconds: int = 900
    heartbeat_interval_seconds: int = 10
    heartbeat_timeout_seconds: int = 30

    rate_limit_requests: int = 60
    rate_limit_window_seconds: int = 60

    # Hex-encoded Ed25519 private keys; signing/attestation are disabled when None.
    receipt_signing_key_hex: Optional[str] = None
    receipt_attestation_key_hex: Optional[str] = None


# Singleton settings instance imported across the app.
settings = Settings()
|
||||||
26
apps/coordinator-api/src/app/deps.py
Normal file
26
apps/coordinator-api/src/app/deps.py
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
from typing import Callable
|
||||||
|
from fastapi import Depends, Header, HTTPException
|
||||||
|
|
||||||
|
from .config import settings
|
||||||
|
|
||||||
|
|
||||||
|
class APIKeyValidator:
    """Callable FastAPI dependency validating the `X-Api-Key` header."""

    def __init__(self, allowed_keys: list[str]):
        # Normalise the configured keys once; blank/empty entries are dropped.
        cleaned = (candidate.strip() for candidate in allowed_keys if candidate)
        self.allowed_keys = set(cleaned)

    def __call__(self, api_key: str | None = Header(default=None, alias="X-Api-Key")) -> str:
        """Return the validated key, or raise 401 for missing/unknown keys."""
        if api_key and api_key in self.allowed_keys:
            return api_key
        raise HTTPException(status_code=401, detail="invalid api key")
|
||||||
|
|
||||||
|
|
||||||
|
def require_client_key() -> Callable[[str | None], str]:
    """Dependency factory: validator accepting the configured client API keys."""
    validator = APIKeyValidator(settings.client_api_keys)
    return validator
|
||||||
|
|
||||||
|
|
||||||
|
def require_miner_key() -> Callable[[str | None], str]:
    """Dependency factory: validator accepting the configured miner API keys."""
    validator = APIKeyValidator(settings.miner_api_keys)
    return validator
|
||||||
|
|
||||||
|
|
||||||
|
def require_admin_key() -> Callable[[str | None], str]:
    """Dependency factory: validator accepting the configured admin API keys."""
    validator = APIKeyValidator(settings.admin_api_keys)
    return validator
|
||||||
7
apps/coordinator-api/src/app/domain/__init__.py
Normal file
7
apps/coordinator-api/src/app/domain/__init__.py
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
"""Domain models for the coordinator API."""
|
||||||
|
|
||||||
|
from .job import Job
|
||||||
|
from .miner import Miner
|
||||||
|
from .job_receipt import JobReceipt
|
||||||
|
|
||||||
|
__all__ = ["Job", "Miner", "JobReceipt"]
|
||||||
30
apps/coordinator-api/src/app/domain/job.py
Normal file
30
apps/coordinator-api/src/app/domain/job.py
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
|
from sqlalchemy import Column, JSON
|
||||||
|
from sqlmodel import Field, SQLModel
|
||||||
|
|
||||||
|
from ..models import JobState
|
||||||
|
|
||||||
|
|
||||||
|
class Job(SQLModel, table=True):
    """Persisted compute job submitted by a client and worked by a miner."""

    id: str = Field(default_factory=lambda: uuid4().hex, primary_key=True, index=True)
    client_id: str = Field(index=True)

    # Lifecycle state (QUEUED -> RUNNING -> COMPLETED/FAILED/CANCELED/EXPIRED).
    state: JobState = Field(default=JobState.queued, sa_column_kwargs={"nullable": False})
    payload: dict = Field(sa_column=Column(JSON, nullable=False))
    constraints: dict = Field(default_factory=dict, sa_column=Column(JSON, nullable=False))

    ttl_seconds: int = Field(default=900)
    requested_at: datetime = Field(default_factory=datetime.utcnow)
    # NOTE(review): default is "now", i.e. already expired, unless the service
    # layer overwrites it with requested_at + ttl — confirm callers always do.
    expires_at: datetime = Field(default_factory=datetime.utcnow)

    assigned_miner_id: Optional[str] = Field(default=None, index=True)

    # Result and signed-receipt documents stored as JSON once the job finishes.
    result: Optional[dict] = Field(default=None, sa_column=Column(JSON, nullable=True))
    receipt: Optional[dict] = Field(default=None, sa_column=Column(JSON, nullable=True))
    receipt_id: Optional[str] = Field(default=None, index=True)
    error: Optional[str] = None
|
||||||
15
apps/coordinator-api/src/app/domain/job_receipt.py
Normal file
15
apps/coordinator-api/src/app/domain/job_receipt.py
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
|
from sqlalchemy import Column, JSON
|
||||||
|
from sqlmodel import Field, SQLModel
|
||||||
|
|
||||||
|
|
||||||
|
class JobReceipt(SQLModel, table=True):
    """Append-only history of signed receipt payloads issued for a job."""

    id: str = Field(default_factory=lambda: uuid4().hex, primary_key=True, index=True)
    job_id: str = Field(index=True, foreign_key="job.id")
    receipt_id: str = Field(index=True)
    # Full signed receipt document as produced by the receipt service.
    payload: dict = Field(sa_column=Column(JSON, nullable=False))
    created_at: datetime = Field(default_factory=datetime.utcnow, index=True)
|
||||||
25
apps/coordinator-api/src/app/domain/miner.py
Normal file
25
apps/coordinator-api/src/app/domain/miner.py
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from sqlalchemy import Column, JSON
|
||||||
|
from sqlmodel import Field, SQLModel
|
||||||
|
|
||||||
|
|
||||||
|
class Miner(SQLModel, table=True):
    """Registered miner node: capabilities, liveness, and rolling job stats."""

    id: str = Field(primary_key=True, index=True)
    region: Optional[str] = Field(default=None, index=True)
    # Arbitrary JSON capability document (e.g. gpus, cuda, models, price).
    capabilities: dict = Field(default_factory=dict, sa_column=Column(JSON, nullable=False))
    concurrency: int = Field(default=1)
    status: str = Field(default="ONLINE", index=True)
    inflight: int = Field(default=0)
    extra_metadata: dict = Field(default_factory=dict, sa_column=Column(JSON, nullable=False))
    last_heartbeat: datetime = Field(default_factory=datetime.utcnow, index=True)
    session_token: Optional[str] = None
    last_job_at: Optional[datetime] = Field(default=None, index=True)
    # Aggregated performance counters, updated as jobs complete or fail.
    jobs_completed: int = Field(default=0)
    jobs_failed: int = Field(default=0)
    total_job_duration_ms: int = Field(default=0)
    average_job_duration_ms: float = Field(default=0.0)
    last_receipt_id: Optional[str] = Field(default=None, index=True)
|
||||||
34
apps/coordinator-api/src/app/main.py
Normal file
34
apps/coordinator-api/src/app/main.py
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
from fastapi import FastAPI
|
||||||
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
|
||||||
|
from .config import settings
|
||||||
|
from .routers import client, miner, admin
|
||||||
|
|
||||||
|
|
||||||
|
def create_app() -> FastAPI:
    """Build and configure the coordinator FastAPI application."""
    app = FastAPI(
        title="AITBC Coordinator API",
        version="0.1.0",
        description="Stage 1 coordinator service handling job orchestration between clients and miners.",
    )

    # CORS is driven by configuration; the default allow-list is dev-permissive.
    app.add_middleware(
        CORSMiddleware,
        allow_origins=settings.allow_origins,
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"]
    )

    # All public routes are versioned under /v1.
    app.include_router(client.router, prefix="/v1")
    app.include_router(miner.router, prefix="/v1")
    app.include_router(admin.router, prefix="/v1")

    @app.get("/v1/health", tags=["health"], summary="Service healthcheck")
    async def health() -> dict[str, str]:
        """Liveness probe; also reports the configured environment."""
        return {"status": "ok", "env": settings.app_env}

    return app


# Module-level ASGI app for uvicorn/gunicorn entry points.
app = create_app()
|
||||||
78
apps/coordinator-api/src/app/models.py
Normal file
78
apps/coordinator-api/src/app/models.py
Normal file
@ -0,0 +1,78 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
|
||||||
|
class JobState(str, Enum):
    """Lifecycle states of a job; values are the wire-format strings."""

    queued = "QUEUED"
    running = "RUNNING"
    completed = "COMPLETED"
    failed = "FAILED"
    canceled = "CANCELED"
    expired = "EXPIRED"
|
||||||
|
|
||||||
|
|
||||||
|
class Constraints(BaseModel):
    """Optional hardware/placement requirements a job imposes on miners."""

    gpu: Optional[str] = None  # exact GPU name to require
    cuda: Optional[str] = None  # matched as substring against miner CUDA info
    min_vram_gb: Optional[int] = None
    models: Optional[list[str]] = None  # model names the miner must serve
    region: Optional[str] = None
    max_price: Optional[float] = None
|
||||||
|
|
||||||
|
|
||||||
|
class JobCreate(BaseModel):
    """Client request body for submitting a new job."""

    payload: Dict[str, Any]
    constraints: Constraints = Field(default_factory=Constraints)
    ttl_seconds: int = 900  # how long the job may wait before expiring
|
||||||
|
|
||||||
|
|
||||||
|
class JobView(BaseModel):
    """Client-facing status projection of a job."""

    job_id: str
    state: JobState
    assigned_miner_id: Optional[str] = None
    requested_at: datetime
    expires_at: datetime
    error: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class JobResult(BaseModel):
    """Terminal output of a job: raw result and optional signed receipt."""

    result: Optional[Dict[str, Any]] = None
    receipt: Optional[Dict[str, Any]] = None
|
||||||
|
|
||||||
|
|
||||||
|
class MinerRegister(BaseModel):
    """Registration payload describing a miner's capabilities."""

    capabilities: Dict[str, Any]
    concurrency: int = 1  # max jobs the miner runs in parallel
    region: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class MinerHeartbeat(BaseModel):
    """Periodic liveness report sent by a miner."""

    inflight: int = 0
    status: str = "ONLINE"
    metadata: Dict[str, Any] = Field(default_factory=dict)
|
||||||
|
|
||||||
|
|
||||||
|
class PollRequest(BaseModel):
    """Long-poll request: miner waits up to `max_wait_seconds` for a job."""

    max_wait_seconds: int = 15
|
||||||
|
|
||||||
|
|
||||||
|
class AssignedJob(BaseModel):
    """Job handed to a miner from the queue."""

    job_id: str
    payload: Dict[str, Any]
    constraints: Constraints
|
||||||
|
|
||||||
|
|
||||||
|
class JobResultSubmit(BaseModel):
    """Successful result upload from a miner."""

    result: Dict[str, Any]
    metrics: Dict[str, Any] = Field(default_factory=dict)
|
||||||
|
|
||||||
|
|
||||||
|
class JobFailSubmit(BaseModel):
    """Failure report from a miner."""

    error_code: str
    error_message: str
    metrics: Dict[str, Any] = Field(default_factory=dict)
|
||||||
1
apps/coordinator-api/src/app/routers/__init__.py
Normal file
1
apps/coordinator-api/src/app/routers/__init__.py
Normal file
@ -0,0 +1 @@
|
|||||||
|
"""Router modules for the coordinator API."""
|
||||||
69
apps/coordinator-api/src/app/routers/admin.py
Normal file
69
apps/coordinator-api/src/app/routers/admin.py
Normal file
@ -0,0 +1,69 @@
|
|||||||
|
from fastapi import APIRouter, Depends, HTTPException, status
|
||||||
|
|
||||||
|
from ..deps import require_admin_key
|
||||||
|
from ..services import JobService, MinerService
|
||||||
|
from ..storage import SessionDep
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/admin", tags=["admin"])
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/stats", summary="Get coordinator stats")
async def get_stats(session: SessionDep, admin_key: str = Depends(require_admin_key())) -> dict[str, int]:  # type: ignore[arg-type]
    """Aggregate job and miner statistics for the admin dashboard."""
    # NOTE(review): `service` is created but never used in this handler.
    service = JobService(session)
    # Imported locally; presumably to avoid import cycles at module load time.
    from sqlmodel import func, select
    from ..domain import Job

    total_jobs = session.exec(select(func.count()).select_from(Job)).one()
    active_jobs = session.exec(select(func.count()).select_from(Job).where(Job.state.in_(["QUEUED", "RUNNING"]))).one()

    miner_service = MinerService(session)
    miners = miner_service.list_records()
    # Mean of per-miner average durations; zero-duration miners are excluded
    # from the sum but still counted in the divisor (max guards div-by-zero).
    avg_job_duration = (
        sum(miner.average_job_duration_ms for miner in miners if miner.average_job_duration_ms) / max(len(miners), 1)
    )
    return {
        "total_jobs": int(total_jobs or 0),
        "active_jobs": int(active_jobs or 0),
        "online_miners": miner_service.online_count(),
        "avg_miner_job_duration_ms": avg_job_duration,
    }
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/jobs", summary="List jobs")
async def list_jobs(session: SessionDep, admin_key: str = Depends(require_admin_key())) -> dict[str, list[dict]]:  # type: ignore[arg-type]
    """Return summaries of the 100 most recently requested jobs.

    Admin-only listing ordered newest-first.
    """
    # Bug fix: `select` was referenced without being in scope in this handler —
    # the module only imports it inside get_stats, so this raised NameError.
    from sqlmodel import select
    from ..domain import Job

    jobs = session.exec(select(Job).order_by(Job.requested_at.desc()).limit(100)).all()
    return {
        "items": [
            {
                "job_id": job.id,
                "state": job.state,
                "client_id": job.client_id,
                "assigned_miner_id": job.assigned_miner_id,
                "requested_at": job.requested_at.isoformat(),
            }
            for job in jobs
        ]
    }
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/miners", summary="List miners")
async def list_miners(session: SessionDep, admin_key: str = Depends(require_admin_key())) -> dict[str, list[dict]]:  # type: ignore[arg-type]
    """Return a summary row for every known miner (admin-only)."""
    miner_service = MinerService(session)
    # NOTE(review): assumes list_records() yields records exposing `miner_id`;
    # the Miner table model itself uses `id` — confirm the service maps it.
    miners = [
        {
            "miner_id": record.miner_id,
            "status": record.status,
            "inflight": record.inflight,
            "concurrency": record.concurrency,
            "region": record.region,
            "last_heartbeat": record.last_heartbeat.isoformat(),
            "average_job_duration_ms": record.average_job_duration_ms,
            "jobs_completed": record.jobs_completed,
            "jobs_failed": record.jobs_failed,
            "last_receipt_id": record.last_receipt_id,
        }
        for record in miner_service.list_records()
    ]
    return {"items": miners}
|
||||||
97
apps/coordinator-api/src/app/routers/client.py
Normal file
97
apps/coordinator-api/src/app/routers/client.py
Normal file
@ -0,0 +1,97 @@
|
|||||||
|
from fastapi import APIRouter, Depends, HTTPException, status
|
||||||
|
|
||||||
|
from ..deps import require_client_key
|
||||||
|
from ..models import JobCreate, JobView, JobResult
|
||||||
|
from ..services import JobService
|
||||||
|
from ..storage import SessionDep
|
||||||
|
|
||||||
|
router = APIRouter(tags=["client"])
|
||||||
|
|
||||||
|
@router.post("/jobs", response_model=JobView, status_code=status.HTTP_201_CREATED, summary="Submit a job")
async def submit_job(
    req: JobCreate,
    session: SessionDep,
    client_id: str = Depends(require_client_key()),
) -> JobView:  # type: ignore[arg-type]
    """Create a new job owned by the calling client and return its view."""
    jobs = JobService(session)
    created = jobs.create_job(client_id, req)
    return jobs.to_view(created)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/jobs/{job_id}", response_model=JobView, summary="Get job status")
async def get_job(
    job_id: str,
    session: SessionDep,
    client_id: str = Depends(require_client_key()),
) -> JobView:  # type: ignore[arg-type]
    """Return the current status view of a job owned by the caller, or 404."""
    jobs = JobService(session)
    try:
        found = jobs.get_job(job_id, client_id=client_id)
    except KeyError:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="job not found")
    return jobs.to_view(found)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/jobs/{job_id}/result", response_model=JobResult, summary="Get job result")
async def get_job_result(
    job_id: str,
    session: SessionDep,
    client_id: str = Depends(require_client_key()),
) -> JobResult:  # type: ignore[arg-type]
    """Return the result/receipt once the job reached a terminal state.

    Raises 404 for unknown jobs (or jobs owned by another client) and 425
    while the job is still pending or has produced no output yet.
    """
    # Bug fix: JobState was referenced but never imported in this module
    # (the top-level import only brings in JobCreate/JobView/JobResult),
    # so this handler raised NameError at runtime.
    from ..models import JobState

    service = JobService(session)
    try:
        job = service.get_job(job_id, client_id=client_id)
    except KeyError:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="job not found")

    if job.state not in {JobState.completed, JobState.failed, JobState.canceled, JobState.expired}:
        raise HTTPException(status_code=status.HTTP_425_TOO_EARLY, detail="job not ready")
    if job.result is None and job.receipt is None:
        raise HTTPException(status_code=status.HTTP_425_TOO_EARLY, detail="job not ready")
    return service.to_result(job)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/jobs/{job_id}/cancel", response_model=JobView, summary="Cancel job")
async def cancel_job(
    job_id: str,
    session: SessionDep,
    client_id: str = Depends(require_client_key()),
) -> JobView:  # type: ignore[arg-type]
    """Cancel a queued or running job owned by the caller.

    Raises 404 for unknown jobs and 409 when the job is already terminal.
    """
    # Bug fix: JobState was referenced but never imported in this module,
    # so this handler raised NameError at runtime.
    from ..models import JobState

    service = JobService(session)
    try:
        job = service.get_job(job_id, client_id=client_id)
    except KeyError:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="job not found")

    if job.state not in {JobState.queued, JobState.running}:
        raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail="job not cancelable")

    job = service.cancel_job(job)
    return service.to_view(job)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/jobs/{job_id}/receipt", summary="Get latest signed receipt")
async def get_job_receipt(
    job_id: str,
    session: SessionDep,
    client_id: str = Depends(require_client_key()),
) -> dict:  # type: ignore[arg-type]
    """Return the most recent signed receipt stored on the job, or 404."""
    jobs = JobService(session)
    try:
        job = jobs.get_job(job_id, client_id=client_id)
    except KeyError:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="job not found")
    if job.receipt:
        return job.receipt
    raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="receipt not available")
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/jobs/{job_id}/receipts", summary="List signed receipts")
async def list_job_receipts(
    job_id: str,
    session: SessionDep,
    client_id: str = Depends(require_client_key()),
) -> dict:  # type: ignore[arg-type]
    """Return every stored signed receipt payload for the job, oldest first."""
    stored = JobService(session).list_receipts(job_id, client_id=client_id)
    payloads = [entry.payload for entry in stored]
    return {"items": payloads}
|
||||||
110
apps/coordinator-api/src/app/routers/miner.py
Normal file
110
apps/coordinator-api/src/app/routers/miner.py
Normal file
@ -0,0 +1,110 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, Response, status
|
||||||
|
|
||||||
|
from ..deps import require_miner_key
|
||||||
|
from ..models import AssignedJob, JobFailSubmit, JobResultSubmit, JobState, MinerHeartbeat, MinerRegister, PollRequest
|
||||||
|
from ..services import JobService, MinerService
|
||||||
|
from ..services.receipts import ReceiptService
|
||||||
|
from ..storage import SessionDep
|
||||||
|
|
||||||
|
router = APIRouter(tags=["miner"])
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/miners/register", summary="Register or update miner")
async def register(
    req: MinerRegister,
    session: SessionDep,
    miner_id: str = Depends(require_miner_key()),
) -> dict[str, Any]:  # type: ignore[arg-type]
    """Create or refresh the miner record and hand back its session token."""
    record = MinerService(session).register(miner_id, req)
    return {"status": "ok", "session_token": record.session_token}
|
||||||
|
|
||||||
|
@router.post("/miners/heartbeat", summary="Send miner heartbeat")
async def heartbeat(
    req: MinerHeartbeat,
    session: SessionDep,
    miner_id: str = Depends(require_miner_key()),
) -> dict[str, str]:  # type: ignore[arg-type]
    """Record a miner liveness update; 404 when the miner is unregistered."""
    service = MinerService(session)
    try:
        service.heartbeat(miner_id, req)
    except KeyError:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="miner not registered")
    return {"status": "ok"}
|
||||||
|
|
||||||
|
|
||||||
|
# NOTE: until scheduling is fully implemented the poll endpoint performs a simple FIFO assignment.
@router.post("/miners/poll", response_model=AssignedJob, summary="Poll for next job")
async def poll(
    req: PollRequest,
    session: SessionDep,
    miner_id: str = Depends(require_miner_key()),
) -> AssignedJob | Response:  # type: ignore[arg-type]
    """Hand the next matching queued job to the miner, or 204 when idle."""
    assigned = MinerService(session).poll(miner_id, req.max_wait_seconds)
    if assigned is not None:
        return assigned
    return Response(status_code=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/miners/{job_id}/result", summary="Submit job result")
async def submit_result(
    job_id: str,
    req: JobResultSubmit,
    session: SessionDep,
    miner_id: str = Depends(require_miner_key()),
) -> dict[str, Any]:  # type: ignore[arg-type]
    """Accept a miner's successful result: store it, mint a receipt, free the miner.

    Returns {"status": "ok", "receipt": ...}; the receipt may be None when
    signing is not configured. Raises 404 for unknown job ids.
    """
    job_service = JobService(session)
    miner_service = MinerService(session)
    receipt_service = ReceiptService(session)
    try:
        job = job_service.get_job(job_id)
    except KeyError:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="job not found")

    # NOTE(review): unlike the /fail path, job.assigned_miner_id is not set
    # here — confirm the poll/assignment path always set it beforehand.
    job.result = req.result
    job.state = JobState.completed
    job.error = None

    # Fall back to wall-clock duration since the job was requested when the
    # miner did not report a duration_ms metric itself.
    metrics = dict(req.metrics or {})
    duration_ms = metrics.get("duration_ms")
    if duration_ms is None and job.requested_at:
        duration_ms = int((datetime.utcnow() - job.requested_at).total_seconds() * 1000)
    metrics["duration_ms"] = duration_ms

    # Mint (and optionally sign) the receipt, then persist it on the job.
    receipt = receipt_service.create_receipt(job, miner_id, req.result, metrics)
    job.receipt = receipt
    job.receipt_id = receipt["receipt_id"] if receipt else None
    session.add(job)
    session.commit()
    # Release the miner slot and record per-miner success statistics.
    miner_service.release(
        miner_id,
        success=True,
        duration_ms=duration_ms,
        receipt_id=receipt["receipt_id"] if receipt else None,
    )
    return {"status": "ok", "receipt": receipt}
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/miners/{job_id}/fail", summary="Submit job failure")
async def submit_failure(
    job_id: str,
    req: JobFailSubmit,
    session: SessionDep,
    miner_id: str = Depends(require_miner_key()),
) -> dict[str, str]:  # type: ignore[arg-type]
    """Mark the job FAILED with the miner-reported error and release the miner."""
    jobs = JobService(session)
    miners = MinerService(session)
    try:
        failed_job = jobs.get_job(job_id)
    except KeyError:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="job not found")

    failed_job.state = JobState.failed
    failed_job.error = f"{req.error_code}: {req.error_message}"
    failed_job.assigned_miner_id = miner_id
    session.add(failed_job)
    session.commit()
    miners.release(miner_id, success=False)
    return {"status": "ok"}
|
||||||
6
apps/coordinator-api/src/app/services/__init__.py
Normal file
6
apps/coordinator-api/src/app/services/__init__.py
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
"""Service layer for coordinator business logic."""
|
||||||
|
|
||||||
|
from .jobs import JobService
|
||||||
|
from .miners import MinerService
|
||||||
|
|
||||||
|
__all__ = ["JobService", "MinerService"]
|
||||||
156
apps/coordinator-api/src/app/services/jobs.py
Normal file
156
apps/coordinator-api/src/app/services/jobs.py
Normal file
@ -0,0 +1,156 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from sqlmodel import Session, select
|
||||||
|
|
||||||
|
from ..domain import Job, Miner, JobReceipt
|
||||||
|
from ..models import AssignedJob, Constraints, JobCreate, JobResult, JobState, JobView
|
||||||
|
|
||||||
|
|
||||||
|
class JobService:
|
||||||
|
def __init__(self, session: Session):
|
||||||
|
self.session = session
|
||||||
|
|
||||||
|
def create_job(self, client_id: str, req: JobCreate) -> Job:
|
||||||
|
ttl = max(req.ttl_seconds, 1)
|
||||||
|
now = datetime.utcnow()
|
||||||
|
job = Job(
|
||||||
|
client_id=client_id,
|
||||||
|
payload=req.payload,
|
||||||
|
constraints=req.constraints.model_dump(exclude_none=True),
|
||||||
|
ttl_seconds=ttl,
|
||||||
|
requested_at=now,
|
||||||
|
expires_at=now + timedelta(seconds=ttl),
|
||||||
|
)
|
||||||
|
self.session.add(job)
|
||||||
|
self.session.commit()
|
||||||
|
self.session.refresh(job)
|
||||||
|
return job
|
||||||
|
|
||||||
|
def get_job(self, job_id: str, client_id: Optional[str] = None) -> Job:
|
||||||
|
query = select(Job).where(Job.id == job_id)
|
||||||
|
if client_id:
|
||||||
|
query = query.where(Job.client_id == client_id)
|
||||||
|
job = self.session.exec(query).one_or_none()
|
||||||
|
if not job:
|
||||||
|
raise KeyError("job not found")
|
||||||
|
return self._ensure_not_expired(job)
|
||||||
|
|
||||||
|
def list_receipts(self, job_id: str, client_id: Optional[str] = None) -> list[JobReceipt]:
|
||||||
|
job = self.get_job(job_id, client_id=client_id)
|
||||||
|
receipts = self.session.exec(
|
||||||
|
select(JobReceipt)
|
||||||
|
.where(JobReceipt.job_id == job.id)
|
||||||
|
.order_by(JobReceipt.created_at.asc())
|
||||||
|
).all()
|
||||||
|
return receipts
|
||||||
|
|
||||||
|
def cancel_job(self, job: Job) -> Job:
|
||||||
|
if job.state not in {JobState.queued, JobState.running}:
|
||||||
|
return job
|
||||||
|
job.state = JobState.canceled
|
||||||
|
job.error = "canceled by client"
|
||||||
|
job.assigned_miner_id = None
|
||||||
|
self.session.add(job)
|
||||||
|
self.session.commit()
|
||||||
|
self.session.refresh(job)
|
||||||
|
return job
|
||||||
|
|
||||||
|
def to_view(self, job: Job) -> JobView:
|
||||||
|
return JobView(
|
||||||
|
job_id=job.id,
|
||||||
|
state=job.state,
|
||||||
|
assigned_miner_id=job.assigned_miner_id,
|
||||||
|
requested_at=job.requested_at,
|
||||||
|
expires_at=job.expires_at,
|
||||||
|
error=job.error,
|
||||||
|
)
|
||||||
|
|
||||||
|
def to_result(self, job: Job) -> JobResult:
|
||||||
|
return JobResult(result=job.result, receipt=job.receipt)
|
||||||
|
|
||||||
|
def to_assigned(self, job: Job) -> AssignedJob:
|
||||||
|
constraints = Constraints(**job.constraints) if isinstance(job.constraints, dict) else Constraints()
|
||||||
|
return AssignedJob(job_id=job.id, payload=job.payload, constraints=constraints)
|
||||||
|
|
||||||
|
def acquire_next_job(self, miner: Miner) -> Optional[Job]:
|
||||||
|
now = datetime.utcnow()
|
||||||
|
statement = (
|
||||||
|
select(Job)
|
||||||
|
.where(Job.state == JobState.queued)
|
||||||
|
.order_by(Job.requested_at.asc())
|
||||||
|
)
|
||||||
|
|
||||||
|
jobs = self.session.exec(statement).all()
|
||||||
|
for job in jobs:
|
||||||
|
job = self._ensure_not_expired(job)
|
||||||
|
if job.state != JobState.queued:
|
||||||
|
continue
|
||||||
|
if job.expires_at <= now:
|
||||||
|
continue
|
||||||
|
if not self._satisfies_constraints(job, miner):
|
||||||
|
continue
|
||||||
|
job.state = JobState.running
|
||||||
|
job.assigned_miner_id = miner.id
|
||||||
|
self.session.add(job)
|
||||||
|
self.session.commit()
|
||||||
|
self.session.refresh(job)
|
||||||
|
return job
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _ensure_not_expired(self, job: Job) -> Job:
    """Transition a queued job past its deadline to the expired state.

    Returns the (possibly updated) job; non-queued jobs pass through
    untouched. The transition is committed immediately.
    """
    if job.state == JobState.queued and job.expires_at <= datetime.utcnow():
        job.state = JobState.expired
        job.error = "job expired"
        self.session.add(job)
        self.session.commit()
        self.session.refresh(job)
    return job
|
||||||
|
|
||||||
|
def _satisfies_constraints(self, job: Job, miner: Miner) -> bool:
    """Return True when *miner*'s advertised capabilities satisfy the job.

    An empty/absent constraints payload matches any miner. Each configured
    constraint (region, GPU name, VRAM, CUDA, models, price) must pass for
    the miner to be eligible.
    """
    if not job.constraints:
        return True
    constraints = Constraints(**job.constraints)
    capabilities = miner.capabilities or {}

    # Region matching
    if constraints.region and constraints.region != miner.region:
        return False

    gpu_specs = capabilities.get("gpus", []) or []
    has_gpu = bool(gpu_specs)

    if constraints.gpu:
        if not has_gpu:
            return False
        # Exact name match against the miner's advertised devices.
        names = [gpu.get("name") for gpu in gpu_specs]
        if constraints.gpu not in names:
            return False

    if constraints.min_vram_gb:
        required_mb = constraints.min_vram_gb * 1024
        # A single device with enough VRAM satisfies the requirement.
        if not any((gpu.get("memory_mb") or 0) >= required_mb for gpu in gpu_specs):
            return False

    if constraints.cuda:
        cuda_info = capabilities.get("cuda")
        # NOTE(review): substring match, not a version comparison — "12"
        # would match "12.1" but also "112"; confirm this is intended.
        if not cuda_info or constraints.cuda not in str(cuda_info):
            return False

    if constraints.models:
        available_models = capabilities.get("models", [])
        # Every requested model must be available on the miner.
        if not set(constraints.models).issubset(set(available_models)):
            return False

    if constraints.max_price is not None:
        price = capabilities.get("price")
        try:
            price_value = float(price)
        except (TypeError, ValueError):
            # Miners without a parseable advertised price never match
            # price-constrained jobs.
            return False
        if price_value > constraints.max_price:
            return False

    return True
|
||||||
110
apps/coordinator-api/src/app/services/miners.py
Normal file
110
apps/coordinator-api/src/app/services/miners.py
Normal file
@ -0,0 +1,110 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
|
from sqlmodel import Session, select
|
||||||
|
|
||||||
|
from ..domain import Miner
|
||||||
|
from ..models import AssignedJob, MinerHeartbeat, MinerRegister
|
||||||
|
from .jobs import JobService
|
||||||
|
|
||||||
|
|
||||||
|
class MinerService:
    """Registration, heartbeat, polling and accounting for miner workers."""

    def __init__(self, session: Session):
        # All operations share the request-scoped database session.
        self.session = session

    def register(self, miner_id: str, payload: MinerRegister) -> Miner:
        """Create or refresh the miner row and issue a new session token.

        Registration is idempotent: re-registering updates capabilities,
        concurrency and region and rotates the session token. The miner is
        marked ONLINE with a fresh heartbeat either way.
        """
        miner = self.session.get(Miner, miner_id)
        session_token = uuid4().hex
        if miner is None:
            miner = Miner(
                id=miner_id,
                capabilities=payload.capabilities,
                concurrency=payload.concurrency,
                region=payload.region,
                session_token=session_token,
            )
            self.session.add(miner)
        else:
            miner.capabilities = payload.capabilities
            miner.concurrency = payload.concurrency
            miner.region = payload.region
            miner.session_token = session_token
        miner.last_heartbeat = datetime.utcnow()
        miner.status = "ONLINE"
        self.session.commit()
        self.session.refresh(miner)
        return miner

    def heartbeat(self, miner_id: str, payload: MinerHeartbeat | dict) -> Miner:
        """Record a liveness report.

        Raises KeyError when the miner has not registered. Accepts either a
        validated MinerHeartbeat or a raw dict (validated here).
        """
        if not isinstance(payload, MinerHeartbeat):
            payload = MinerHeartbeat.model_validate(payload)
        miner = self.session.get(Miner, miner_id)
        if miner is None:
            raise KeyError("miner not registered")
        miner.inflight = payload.inflight
        miner.status = payload.status
        miner.extra_metadata = payload.metadata
        miner.last_heartbeat = datetime.utcnow()
        self.session.add(miner)
        self.session.commit()
        self.session.refresh(miner)
        return miner

    def poll(self, miner_id: str, max_wait_seconds: int) -> Optional[AssignedJob]:
        """Try to hand the miner its next job; None when nothing is available.

        Raises KeyError for unregistered miners. Saturated miners (inflight
        at or over their declared concurrency) receive no work.

        NOTE(review): max_wait_seconds is not used for any blocking wait in
        this body — confirm whether long-polling is handled by the caller.
        """
        miner = self.session.get(Miner, miner_id)
        if miner is None:
            raise KeyError("miner not registered")
        if miner.concurrency and miner.inflight >= miner.concurrency:
            return None

        job_service = JobService(self.session)
        job = job_service.acquire_next_job(miner)
        if not job:
            return None

        # Claiming a job consumes one unit of the miner's capacity.
        miner.inflight += 1
        miner.last_heartbeat = datetime.utcnow()
        miner.last_job_at = datetime.utcnow()
        self.session.add(miner)
        self.session.commit()
        return job_service.to_assigned(job)

    def release(
        self,
        miner_id: str,
        success: bool | None = None,
        duration_ms: int | None = None,
        receipt_id: str | None = None,
    ) -> None:
        """Return one unit of miner capacity and update job statistics.

        success=None only decrements inflight; True/False additionally
        updates the completed/failed counters (and duration averages on
        success). Unknown miner ids are silently ignored.
        """
        miner = self.session.get(Miner, miner_id)
        if miner:
            # Clamp at zero to tolerate duplicate releases.
            miner.inflight = max(0, miner.inflight - 1)
            if success is True:
                miner.jobs_completed += 1
                if duration_ms is not None:
                    miner.total_job_duration_ms += duration_ms
                    miner.average_job_duration_ms = (
                        miner.total_job_duration_ms / max(miner.jobs_completed, 1)
                    )
            elif success is False:
                miner.jobs_failed += 1
            if receipt_id:
                miner.last_receipt_id = receipt_id
            self.session.add(miner)
            self.session.commit()

    def get(self, miner_id: str) -> Miner:
        """Fetch a miner row; raises KeyError when not registered."""
        miner = self.session.get(Miner, miner_id)
        if miner is None:
            raise KeyError("miner not registered")
        return miner

    def list_records(self) -> list[Miner]:
        """Return all miner rows."""
        return list(self.session.exec(select(Miner)).all())

    def online_count(self) -> int:
        """Count miners currently marked ONLINE."""
        result = self.session.exec(select(Miner).where(Miner.status == "ONLINE"))
        return len(result.all())
|
||||||
79
apps/coordinator-api/src/app/services/receipts.py
Normal file
79
apps/coordinator-api/src/app/services/receipts.py
Normal file
@ -0,0 +1,79 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
from secrets import token_hex
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from aitbc_crypto.signing import ReceiptSigner
|
||||||
|
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
|
from ..config import settings
|
||||||
|
from ..domain import Job, JobReceipt
|
||||||
|
|
||||||
|
|
||||||
|
class ReceiptService:
    """Builds, signs and stages usage receipts for completed jobs."""

    def __init__(self, session: Session) -> None:
        self.session = session
        # Signers are optional: receipts are only produced when the
        # corresponding hex-encoded keys are configured in settings.
        self._signer: Optional[ReceiptSigner] = None
        self._attestation_signer: Optional[ReceiptSigner] = None
        if settings.receipt_signing_key_hex:
            key_bytes = bytes.fromhex(settings.receipt_signing_key_hex)
            self._signer = ReceiptSigner(key_bytes)
        if settings.receipt_attestation_key_hex:
            attest_bytes = bytes.fromhex(settings.receipt_attestation_key_hex)
            self._attestation_signer = ReceiptSigner(attest_bytes)

    def create_receipt(
        self,
        job: Job,
        miner_id: str,
        job_result: Dict[str, Any] | None,
        result_metrics: Dict[str, Any] | None,
    ) -> Dict[str, Any] | None:
        """Build, sign, optionally attest and stage a receipt for *job*.

        Returns the signed receipt payload, or None when no signing key is
        configured. Metric values take precedence over values reported in
        the job result when both are present.

        NOTE(review): the JobReceipt row is added to the session but not
        committed here — confirm the caller owns the transaction.
        """
        if self._signer is None:
            return None
        payload = {
            "version": "1.0",
            "receipt_id": token_hex(16),
            "job_id": job.id,
            "provider": miner_id,
            "client": job.client_id,
            "units": _first_present([
                (result_metrics or {}).get("units"),
                (job_result or {}).get("units"),
            ], default=0.0),
            "unit_type": _first_present([
                (result_metrics or {}).get("unit_type"),
                (job_result or {}).get("unit_type"),
            ], default="gpu_seconds"),
            "price": _first_present([
                (result_metrics or {}).get("price"),
                (job_result or {}).get("price"),
            ]),
            # Fall back to "now" when the job has no recorded request time.
            "started_at": int(job.requested_at.timestamp()) if job.requested_at else int(datetime.utcnow().timestamp()),
            "completed_at": int(datetime.utcnow().timestamp()),
            "metadata": {
                "job_payload": job.payload,
                "job_constraints": job.constraints,
                "result": job_result,
                "metrics": result_metrics,
            },
        }
        payload["signature"] = self._signer.sign(payload)
        if self._attestation_signer:
            payload.setdefault("attestations", [])
            # The attestation covers the receipt body only, excluding the
            # primary signature and any existing attestations.
            attestation_payload = dict(payload)
            attestation_payload.pop("attestations", None)
            attestation_payload.pop("signature", None)
            payload["attestations"].append(self._attestation_signer.sign(attestation_payload))
        receipt_row = JobReceipt(job_id=job.id, receipt_id=payload["receipt_id"], payload=payload)
        self.session.add(receipt_row)
        return payload
|
||||||
|
|
||||||
|
|
||||||
|
def _first_present(values: list[Optional[Any]], default: Optional[Any] = None) -> Optional[Any]:
|
||||||
|
for value in values:
|
||||||
|
if value is not None:
|
||||||
|
return value
|
||||||
|
return default
|
||||||
5
apps/coordinator-api/src/app/storage/__init__.py
Normal file
5
apps/coordinator-api/src/app/storage/__init__.py
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
"""Persistence helpers for the coordinator API."""
|
||||||
|
|
||||||
|
from .db import SessionDep, get_session, init_db
|
||||||
|
|
||||||
|
__all__ = ["SessionDep", "get_session", "init_db"]
|
||||||
42
apps/coordinator-api/src/app/storage/db.py
Normal file
42
apps/coordinator-api/src/app/storage/db.py
Normal file
@ -0,0 +1,42 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from contextlib import contextmanager
|
||||||
|
from typing import Annotated, Generator
|
||||||
|
|
||||||
|
from fastapi import Depends
|
||||||
|
from sqlalchemy.engine import Engine
|
||||||
|
from sqlmodel import Session, SQLModel, create_engine
|
||||||
|
|
||||||
|
from ..config import settings
|
||||||
|
from ..domain import Job, Miner
|
||||||
|
|
||||||
|
_engine: Engine | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_engine() -> Engine:
    """Return the process-wide SQLAlchemy engine, creating it lazily."""
    global _engine

    if _engine is None:
        # SQLite forbids cross-thread connection sharing by default, but the
        # web server may serve requests from multiple threads.
        connect_args = {"check_same_thread": False} if settings.database_url.startswith("sqlite") else {}
        _engine = create_engine(settings.database_url, echo=False, connect_args=connect_args)
    return _engine
|
||||||
|
|
||||||
|
|
||||||
|
def init_db() -> None:
    """Create all SQLModel tables on the configured engine (idempotent)."""
    engine = get_engine()
    SQLModel.metadata.create_all(engine)
|
||||||
|
|
||||||
|
|
||||||
|
@contextmanager
def session_scope() -> Generator[Session, None, None]:
    """Context manager yielding a Session that is closed on exit."""
    engine = get_engine()
    with Session(engine) as session:
        yield session
|
||||||
|
|
||||||
|
|
||||||
|
def get_session() -> Generator[Session, None, None]:
    """FastAPI dependency yielding a per-request database session."""
    with session_scope() as session:
        yield session
|
||||||
|
|
||||||
|
|
||||||
|
SessionDep = Annotated[Session, Depends(get_session)]
|
||||||
77
apps/coordinator-api/tests/test_client_receipts.py
Normal file
77
apps/coordinator-api/tests/test_client_receipts.py
Normal file
@ -0,0 +1,77 @@
|
|||||||
|
import pytest
|
||||||
|
from fastapi.testclient import TestClient
|
||||||
|
from nacl.signing import SigningKey
|
||||||
|
|
||||||
|
from app.main import create_app
|
||||||
|
from app.models import JobCreate, MinerRegister, JobResultSubmit
|
||||||
|
from app.storage.db import init_db
|
||||||
|
from app.config import settings
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="module", autouse=True)
def test_client(tmp_path_factory):
    """Module-scoped TestClient backed by a throwaway SQLite database."""
    db_file = tmp_path_factory.mktemp("data") / "client_receipts.db"
    # Point the app at the temp database before tables are created.
    settings.database_url = f"sqlite:///{db_file}"
    init_db()
    app = create_app()
    with TestClient(app) as client:
        yield client
|
||||||
|
|
||||||
|
|
||||||
|
def test_receipt_endpoint_returns_signed_receipt(test_client: TestClient):
    """End-to-end: register, submit a job, complete it, then fetch its signed receipt."""
    signing_key = SigningKey.generate()
    settings.receipt_signing_key_hex = signing_key.encode().hex()

    # register miner
    resp = test_client.post(
        "/v1/miners/register",
        json={"capabilities": {"price": 1}, "concurrency": 1},
        headers={"X-Api-Key": "miner_dev_key_1"},
    )
    assert resp.status_code == 200

    # submit job
    job_payload = {
        "payload": {"task": "receipt"},
    }
    resp = test_client.post(
        "/v1/jobs",
        json=job_payload,
        headers={"X-Api-Key": "client_dev_key_1"},
    )
    assert resp.status_code == 201
    job_id = resp.json()["job_id"]

    # poll for job assignment
    poll_resp = test_client.post(
        "/v1/miners/poll",
        json={"max_wait_seconds": 1},
        headers={"X-Api-Key": "miner_dev_key_1"},
    )
    assert poll_resp.status_code in (200, 204)

    # submit result
    result_payload = {
        "result": {"units": 1, "unit_type": "gpu_seconds", "price": 1},
        "metrics": {"units": 1, "duration_ms": 500}
    }
    result_resp = test_client.post(
        f"/v1/miners/{job_id}/result",
        json=result_payload,
        headers={"X-Api-Key": "miner_dev_key_1"},
    )
    assert result_resp.status_code == 200
    signed_receipt = result_resp.json()["receipt"]
    assert signed_receipt["signature"]["alg"] == "Ed25519"

    # fetch receipt via client endpoint
    receipt_resp = test_client.get(
        f"/v1/jobs/{job_id}/receipt",
        headers={"X-Api-Key": "client_dev_key_1"},
    )
    assert receipt_resp.status_code == 200
    payload = receipt_resp.json()
    assert payload["receipt_id"] == signed_receipt["receipt_id"]
    assert payload["signature"]["alg"] == "Ed25519"

    # Restore the global setting so later tests are unaffected.
    settings.receipt_signing_key_hex = None
|
||||||
57
apps/coordinator-api/tests/test_jobs.py
Normal file
57
apps/coordinator-api/tests/test_jobs.py
Normal file
@ -0,0 +1,57 @@
|
|||||||
|
import pytest
|
||||||
|
from sqlmodel import Session, delete
|
||||||
|
|
||||||
|
from app.domain import Job, Miner
|
||||||
|
from app.models import JobCreate
|
||||||
|
from app.services.jobs import JobService
|
||||||
|
from app.storage.db import init_db, session_scope
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="module", autouse=True)
def _init_db(tmp_path_factory):
    """Point settings at a temp SQLite file and create the schema once per module."""
    db_file = tmp_path_factory.mktemp("data") / "test.db"
    # override settings dynamically
    from app.config import settings

    settings.database_url = f"sqlite:///{db_file}"
    init_db()
    yield
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture()
def session():
    """Yield a fresh session with Job and Miner tables wiped for isolation."""
    with session_scope() as sess:
        sess.exec(delete(Job))
        sess.exec(delete(Miner))
        sess.commit()
        yield sess
|
||||||
|
|
||||||
|
|
||||||
|
def test_create_and_fetch_job(session: Session):
    """A created job can be fetched back by id and owning client."""
    svc = JobService(session)
    job = svc.create_job("client1", JobCreate(payload={"task": "noop"}))
    fetched = svc.get_job(job.id, client_id="client1")
    assert fetched.id == job.id
    assert fetched.payload["task"] == "noop"
|
||||||
|
|
||||||
|
|
||||||
|
def test_acquire_next_job(session: Session):
    """Jobs are claimed in FIFO order and marked RUNNING; queue drains to None."""
    svc = JobService(session)
    job1 = svc.create_job("client1", JobCreate(payload={"n": 1}))
    job2 = svc.create_job("client1", JobCreate(payload={"n": 2}))

    miner = Miner(id="miner1", capabilities={}, concurrency=1)
    session.add(miner)
    session.commit()

    next_job = svc.acquire_next_job(miner)
    assert next_job is not None
    assert next_job.id == job1.id
    assert next_job.state == "RUNNING"

    next_job2 = svc.acquire_next_job(miner)
    assert next_job2 is not None
    assert next_job2.id == job2.id

    # No more jobs
    assert svc.acquire_next_job(miner) is None
|
||||||
258
apps/coordinator-api/tests/test_miner_service.py
Normal file
258
apps/coordinator-api/tests/test_miner_service.py
Normal file
@ -0,0 +1,258 @@
|
|||||||
|
import pytest
|
||||||
|
from sqlmodel import Session
|
||||||
|
from nacl.signing import SigningKey
|
||||||
|
|
||||||
|
from aitbc_crypto.signing import ReceiptVerifier
|
||||||
|
|
||||||
|
from app.models import MinerRegister, JobCreate, Constraints
|
||||||
|
from app.services.jobs import JobService
|
||||||
|
from app.services.miners import MinerService
|
||||||
|
from app.services.receipts import ReceiptService
|
||||||
|
from app.storage.db import init_db, session_scope
|
||||||
|
from app.config import settings
|
||||||
|
from app.domain import JobReceipt
|
||||||
|
from sqlmodel import select
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="module", autouse=True)
def _init_db(tmp_path_factory):
    """Point settings at a temp SQLite file and create the schema once per module."""
    db_file = tmp_path_factory.mktemp("data") / "miner.db"
    from app.config import settings

    settings.database_url = f"sqlite:///{db_file}"
    init_db()
    yield
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture()
def session():
    """Yield a database session for a single test; no table cleanup here."""
    with session_scope() as sess:
        yield sess
|
||||||
|
|
||||||
|
|
||||||
|
def test_register_and_poll_inflight(session: Session):
    """poll() increments inflight on assignment; release() decrements it."""
    miner_service = MinerService(session)
    job_service = JobService(session)

    miner_service.register(
        "miner-1",
        MinerRegister(
            capabilities={"gpu": False},
            concurrency=1,
        ),
    )

    job_service.create_job("client-a", JobCreate(payload={"task": "demo"}))
    assigned = miner_service.poll("miner-1", max_wait_seconds=1)
    assert assigned is not None

    miner = miner_service.get("miner-1")
    assert miner.inflight == 1

    miner_service.release("miner-1")
    miner = miner_service.get("miner-1")
    assert miner.inflight == 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_heartbeat_updates_metadata(session: Session):
    """A dict heartbeat payload is validated and applied to the miner row."""
    miner_service = MinerService(session)

    miner_service.register(
        "miner-2",
        MinerRegister(
            capabilities={"gpu": True},
            concurrency=2,
        ),
    )

    miner_service.heartbeat(
        "miner-2",
        payload=dict(inflight=3, status="BUSY", metadata={"load": 0.9}),
    )

    miner = miner_service.get("miner-2")
    assert miner.status == "BUSY"
    assert miner.inflight == 3
    assert miner.extra_metadata.get("load") == 0.9
|
||||||
|
|
||||||
|
|
||||||
|
def test_capability_constrained_assignment(session: Session):
    """Only the job whose region/GPU/VRAM/model constraints match is assigned."""
    miner_service = MinerService(session)
    job_service = JobService(session)

    miner = miner_service.register(
        "miner-cap",
        MinerRegister(
            capabilities={
                "gpus": [{"name": "NVIDIA RTX 4090", "memory_mb": 24576}],
                "models": ["stable-diffusion", "llama"]
            },
            concurrency=1,
            region="eu-west",
        ),
    )

    # First job targets a different region, so it must be skipped.
    job_service.create_job(
        "client-x",
        JobCreate(
            payload={"task": "render"},
            constraints=Constraints(region="us-east"),
        ),
    )
    job_service.create_job(
        "client-x",
        JobCreate(
            payload={"task": "render-hf"},
            constraints=Constraints(
                region="eu-west",
                gpu="NVIDIA RTX 4090",
                min_vram_gb=12,
                models=["stable-diffusion"],
            ),
        ),
    )

    assigned = miner_service.poll("miner-cap", max_wait_seconds=1)
    assert assigned is not None
    assert assigned.job_id is not None
    assert assigned.payload["task"] == "render-hf"

    miner_state = miner_service.get("miner-cap")
    assert miner_state.inflight == 1

    miner_service.release("miner-cap")
|
||||||
|
|
||||||
|
|
||||||
|
def test_price_constraint(session: Session):
    """Jobs with max_price below the miner's advertised price are skipped."""
    miner_service = MinerService(session)
    job_service = JobService(session)

    miner_service.register(
        "miner-price",
        MinerRegister(
            capabilities={
                "gpus": [{"name": "NVIDIA RTX 3070", "memory_mb": 8192}],
                "models": [],
                "price": 3.5,
            },
            concurrency=1,
        ),
    )

    # max_price 2.0 < advertised 3.5, so this job is ineligible.
    job_service.create_job(
        "client-y",
        JobCreate(
            payload={"task": "cheap"},
            constraints=Constraints(max_price=2.0),
        ),
    )
    job_service.create_job(
        "client-y",
        JobCreate(
            payload={"task": "fair"},
            constraints=Constraints(max_price=4.0),
        ),
    )

    assigned = miner_service.poll("miner-price", max_wait_seconds=1)
    assert assigned is not None
    assert assigned.payload["task"] == "fair"

    miner_service.release("miner-price")
|
||||||
|
|
||||||
|
|
||||||
|
def test_receipt_signing(session: Session):
    """Receipts are Ed25519-signed and release() updates miner statistics."""
    signing_key = SigningKey.generate()
    settings.receipt_signing_key_hex = signing_key.encode().hex()

    job_service = JobService(session)
    miner_service = MinerService(session)
    # ReceiptService reads the signing key from settings at construction time.
    receipt_service = ReceiptService(session)

    miner_service.register(
        "miner-r",
        MinerRegister(
            capabilities={"price": 1.0},
            concurrency=1,
        ),
    )

    job = job_service.create_job(
        "client-r",
        JobCreate(payload={"task": "sign"}),
    )

    receipt = receipt_service.create_receipt(
        job,
        "miner-r",
        {"units": 1.0, "unit_type": "gpu_seconds", "price": 1.2},
        {"units": 1.0},
    )

    assert receipt is not None
    signature = receipt.get("signature")
    assert signature is not None
    assert signature["alg"] == "Ed25519"

    miner_service.release("miner-r", success=True, duration_ms=500, receipt_id=receipt["receipt_id"])
    miner_state = miner_service.get("miner-r")
    assert miner_state.jobs_completed == 1
    assert miner_state.total_job_duration_ms == 500
    assert miner_state.average_job_duration_ms == 500
    assert miner_state.last_receipt_id == receipt["receipt_id"]

    # Verify against the payload minus signature/attestation fields.
    verifier = ReceiptVerifier(signing_key.verify_key.encode())
    payload = {k: v for k, v in receipt.items() if k not in {"signature", "attestations"}}
    assert verifier.verify(payload, receipt["signature"]) is True

    # Reset signing key for subsequent tests
    settings.receipt_signing_key_hex = None
|
||||||
|
|
||||||
|
|
||||||
|
def test_receipt_signing_with_attestation(session: Session):
    """With an attestation key configured, receipts carry a second verifiable signature and are persisted."""
    signing_key = SigningKey.generate()
    attest_key = SigningKey.generate()
    settings.receipt_signing_key_hex = signing_key.encode().hex()
    settings.receipt_attestation_key_hex = attest_key.encode().hex()

    job_service = JobService(session)
    miner_service = MinerService(session)
    receipt_service = ReceiptService(session)

    miner_service.register(
        "miner-attest",
        MinerRegister(capabilities={"price": 1.0}, concurrency=1),
    )

    job = job_service.create_job(
        "client-attest",
        JobCreate(payload={"task": "attest"}),
    )

    receipt = receipt_service.create_receipt(
        job,
        "miner-attest",
        {"units": 1.0, "unit_type": "gpu_seconds", "price": 2.0},
        {"units": 1.0},
    )

    assert receipt is not None
    assert receipt.get("signature") is not None
    attestations = receipt.get("attestations")
    assert attestations is not None and len(attestations) == 1

    # The receipt row was staged by the service within this session.
    stored_receipts = session.exec(select(JobReceipt).where(JobReceipt.job_id == job.id)).all()
    assert len(stored_receipts) == 1
    assert stored_receipts[0].receipt_id == receipt["receipt_id"]

    payload = {k: v for k, v in receipt.items() if k not in {"signature", "attestations"}}

    miner_verifier = ReceiptVerifier(signing_key.verify_key.encode())
    assert miner_verifier.verify(payload, receipt["signature"]) is True

    attest_verifier = ReceiptVerifier(attest_key.verify_key.encode())
    assert attest_verifier.verify(payload, attestations[0]) is True

    settings.receipt_signing_key_hex = None
    settings.receipt_attestation_key_hex = None
|
||||||
|
|
||||||
158
apps/explorer-web/README.md
Normal file
158
apps/explorer-web/README.md
Normal file
@ -0,0 +1,158 @@
|
|||||||
|
# Explorer Web
|
||||||
|
|
||||||
|
## Purpose & Scope
|
||||||
|
|
||||||
|
Static web explorer for the AITBC blockchain node, displaying blocks, transactions, and receipts as outlined in `docs/bootstrap/explorer_web.md`.
|
||||||
|
|
||||||
|
## Development Setup
|
||||||
|
|
||||||
|
- Install dependencies:
|
||||||
|
```bash
|
||||||
|
npm install
|
||||||
|
```
|
||||||
|
- Start the dev server (Vite):
|
||||||
|
```bash
|
||||||
|
npm run dev
|
||||||
|
```
|
||||||
|
- The explorer ships with mock data in `public/mock/` that powers the tables by default.
|
||||||
|
|
||||||
|
### Data Mode Toggle
|
||||||
|
|
||||||
|
- Configuration lives in `src/config.ts` and can be overridden with environment variables.
|
||||||
|
- Use `VITE_DATA_MODE` to choose between `mock` (default) and `live`.
|
||||||
|
- When switching to live data, set `VITE_COORDINATOR_API` to the coordinator base URL (e.g. `http://localhost:8000`).
|
||||||
|
- Example `.env` snippet:
|
||||||
|
```bash
|
||||||
|
VITE_DATA_MODE=live
|
||||||
|
VITE_COORDINATOR_API=https://coordinator.dev.internal
|
||||||
|
```
|
||||||
|
With live mode enabled, the SPA will request `/v1/<resource>` routes from the coordinator instead of the bundled mock JSON.
|
||||||
|
|
||||||
|
## Next Steps
|
||||||
|
|
||||||
|
- Build out responsive styling and navigation interactions.
|
||||||
|
- Extend the data layer to support coordinator authentication and pagination when live endpoints are ready.
|
||||||
|
- Document coordinator API assumptions once the backend contracts stabilize.
|
||||||
|
|
||||||
|
## Coordinator API Contracts (Draft)
|
||||||
|
|
||||||
|
- **Blocks** (`GET /v1/blocks?limit=&offset=`)
|
||||||
|
- Expected payload:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"items": [
|
||||||
|
{
|
||||||
|
"height": 12045,
|
||||||
|
"hash": "0x...",
|
||||||
|
"timestamp": "2025-09-27T01:58:12Z",
|
||||||
|
"tx_count": 8,
|
||||||
|
"proposer": "miner-alpha"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"next_offset": 12040
|
||||||
|
}
|
||||||
|
```
|
||||||
|
- TODO: confirm pagination fields and proposer metadata.
|
||||||
|
|
||||||
|
- **Transactions** (`GET /v1/transactions?limit=&offset=`)
|
||||||
|
- Expected payload:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"items": [
|
||||||
|
{
|
||||||
|
"hash": "0x...",
|
||||||
|
"block": 12045,
|
||||||
|
"from": "0x...",
|
||||||
|
"to": "0x...",
|
||||||
|
"value": "12.5",
|
||||||
|
"status": "Succeeded"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"next_offset": "0x..."
|
||||||
|
}
|
||||||
|
```
|
||||||
|
- TODO: finalize value units (AIT vs wei) and status enum.
|
||||||
|
|
||||||
|
- **Addresses** (`GET /v1/addresses/{address}`)
|
||||||
|
- Expected payload:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"address": "0x...",
|
||||||
|
"balance": "1450.25",
|
||||||
|
"tx_count": 42,
|
||||||
|
"last_active": "2025-09-27T01:48:00Z",
|
||||||
|
"recent_transactions": ["0x..."]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
- TODO: detail pagination for recent transactions and add receipt summary references.
|
||||||
|
|
||||||
|
- **Receipts** (`GET /v1/jobs/{job_id}/receipts`)
|
||||||
|
- Expected payload:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"job_id": "job-0001",
|
||||||
|
"items": [
|
||||||
|
{
|
||||||
|
"receipt_id": "rcpt-123",
|
||||||
|
"miner": "miner-alpha",
|
||||||
|
"coordinator": "coordinator-001",
|
||||||
|
"issued_at": "2025-09-27T01:52:22Z",
|
||||||
|
"status": "Attested",
|
||||||
|
"payload": {
|
||||||
|
"miner_signature": "0x...",
|
||||||
|
"coordinator_signature": "0x..."
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
- TODO: confirm signature payload structure and include attestation metadata.
|
||||||
|
|
||||||
|
## Styling Guide
|
||||||
|
|
||||||
|
- **`public/css/base.css`**
|
||||||
|
- Defines global typography, color scheme, and utility classes (tables, placeholders, code tags).
|
||||||
|
- Use this file for cross-page primitives and reset/normalization rules.
|
||||||
|
- When adding new utilities (e.g., badges, alerts), document them in this section and keep naming consistent with the existing BEM-lite approach.
|
||||||
|
|
||||||
|
- **`public/css/layout.css`**
|
||||||
|
- Contains structural styles for the Explorer shell (header, footer, cards, forms, grids).
|
||||||
|
- Encapsulate component-specific classes with a predictable prefix, such as `.blocks__table`, `.addresses__input-group`, or `.receipts__controls`.
|
||||||
|
- Prefer utility classes from `base.css` when possible, and only introduce new layout classes when a component requires dedicated styling.
|
||||||
|
|
||||||
|
- **Adding New Components**
|
||||||
|
- Create semantic markup first in `src/pages/` or `src/components/`, using descriptive class names that map to the page or component (`.transactions__filter`, `.overview__chart`).
|
||||||
|
- Extend `layout.css` with matching selectors to style the new elements; keep related rules grouped together for readability.
|
||||||
|
- For reusable widgets across multiple pages, consider extracting shared styles into a dedicated section or introducing a new partial CSS file when the component becomes complex.
|
||||||
|
|
||||||
|
## Deployment Notes
|
||||||
|
|
||||||
|
- **Environment Variables**
|
||||||
|
- `VITE_DATA_MODE`: `mock` (default) or `live`.
|
||||||
|
- `VITE_COORDINATOR_API`: Base URL for coordinator API when `live` mode is enabled.
|
||||||
|
- Additional Vite variables can be added following the `VITE_*` naming convention.
|
||||||
|
|
||||||
|
- **Mock vs Live**
|
||||||
|
- In non-production environments, keep `VITE_DATA_MODE=mock` to serve the static JSON under `public/mock/` for quick demos.
|
||||||
|
- For staging/production deployments, set `VITE_DATA_MODE=live` and ensure the coordinator endpoint is reachable from the frontend origin; configure CORS accordingly on the backend.
|
||||||
|
- Consider serving mock JSON from a CDN or static bucket if you want deterministic demos while backend dependencies are under development.
|
||||||
|
|
||||||
|
- **Build & Deploy**
|
||||||
|
- Build command: `npm run build` (outputs to `dist/`).
|
||||||
|
- Preview locally with `npm run preview` before publishing.
|
||||||
|
- Deploy the `dist/` contents to your static host (e.g., Nginx, S3 + CloudFront, Vercel). Ensure environment variables are injected at build time or through runtime configuration mechanisms supported by your hosting provider.
|
||||||
|
|
||||||
|
## Error Handling (Live Mode)
|
||||||
|
|
||||||
|
- **Status Codes**
|
||||||
|
- `2xx`: Treat as success; map response bodies into the typed models in `src/lib/models.ts`.
|
||||||
|
- `4xx`: Surface actionable messages to the user (e.g., invalid job ID). For `404`, show “not found” states in the relevant page. For `429`, display a rate-limit notice and back off.
|
||||||
|
- `5xx`: Show a generic coordinator outage message and trigger retry logic.
|
||||||
|
|
||||||
|
- **Retry Strategy**
|
||||||
|
- Use an exponential backoff with jitter when retrying `5xx` or network failures (suggested base delay 500 ms, max 5 attempts).
|
||||||
|
- Do not retry on `4xx` except `429`; instead, display feedback.
|
||||||
|
|
||||||
|
- **Telemetry & Logging**
|
||||||
|
- Consider emitting console warnings or hooking into an analytics layer when retries occur, noting the endpoint and status code.
|
||||||
|
- Bubble critical errors via a shared notification component so users understand whether data is stale or unavailable.
|
||||||
15
apps/explorer-web/package.json
Normal file
15
apps/explorer-web/package.json
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"name": "aitbc-explorer-web",
|
||||||
|
"version": "0.1.0",
|
||||||
|
"private": true,
|
||||||
|
"scripts": {
|
||||||
|
"dev": "vite",
|
||||||
|
"build": "vite build",
|
||||||
|
"preview": "vite preview"
|
||||||
|
},
|
||||||
|
"dependencies": {},
|
||||||
|
"devDependencies": {
|
||||||
|
"typescript": "^5.4.0",
|
||||||
|
"vite": "^5.2.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
82
apps/explorer-web/public/css/base.css
Normal file
82
apps/explorer-web/public/css/base.css
Normal file
@ -0,0 +1,82 @@
|
|||||||
|
:root {
|
||||||
|
color-scheme: dark;
|
||||||
|
font-family: var(--font-base);
|
||||||
|
font-size: 16px;
|
||||||
|
line-height: 1.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
* {
|
||||||
|
box-sizing: border-box;
|
||||||
|
}
|
||||||
|
|
||||||
|
body {
|
||||||
|
margin: 0;
|
||||||
|
background-color: var(--color-bg);
|
||||||
|
color: var(--color-text-primary);
|
||||||
|
}
|
||||||
|
|
||||||
|
a {
|
||||||
|
color: var(--color-primary);
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
a:hover,
|
||||||
|
a:focus {
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
|
|
||||||
|
p {
|
||||||
|
margin: 0 0 1rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
h1,
|
||||||
|
h2,
|
||||||
|
h3,
|
||||||
|
h4,
|
||||||
|
h5,
|
||||||
|
h6 {
|
||||||
|
margin: 0 0 0.75rem;
|
||||||
|
line-height: 1.2;
|
||||||
|
}
|
||||||
|
|
||||||
|
code {
|
||||||
|
font-family: var(--font-mono);
|
||||||
|
font-size: 0.95em;
|
||||||
|
background: var(--color-table-head);
|
||||||
|
padding: 0.125rem 0.375rem;
|
||||||
|
border-radius: var(--radius-sm);
|
||||||
|
}
|
||||||
|
|
||||||
|
.table {
|
||||||
|
width: 100%;
|
||||||
|
border-collapse: collapse;
|
||||||
|
margin: 1rem 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.table thead {
|
||||||
|
background: var(--color-table-head);
|
||||||
|
}
|
||||||
|
|
||||||
|
.table th,
|
||||||
|
.table td {
|
||||||
|
padding: 0.75rem;
|
||||||
|
text-align: left;
|
||||||
|
}
|
||||||
|
|
||||||
|
.table tbody tr:nth-child(even) {
|
||||||
|
background: var(--color-table-even);
|
||||||
|
}
|
||||||
|
|
||||||
|
.table tbody tr:hover {
|
||||||
|
background: var(--color-primary-hover);
|
||||||
|
}
|
||||||
|
|
||||||
|
.placeholder {
|
||||||
|
color: var(--color-placeholder);
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
|
|
||||||
|
.lead {
|
||||||
|
font-size: 1.05rem;
|
||||||
|
color: var(--color-text-secondary);
|
||||||
|
}
|
||||||
229
apps/explorer-web/public/css/layout.css
Normal file
229
apps/explorer-web/public/css/layout.css
Normal file
@ -0,0 +1,229 @@
|
|||||||
|
.site-header {
|
||||||
|
background: rgba(22, 27, 34, 0.95);
|
||||||
|
border-bottom: 1px solid rgba(125, 196, 255, 0.2);
|
||||||
|
position: sticky;
|
||||||
|
top: 0;
|
||||||
|
z-index: 1000;
|
||||||
|
}
|
||||||
|
|
||||||
|
.site-header__inner {
|
||||||
|
margin: 0 auto;
|
||||||
|
max-width: 1200px;
|
||||||
|
padding: 0.75rem 1.5rem;
|
||||||
|
display: flex;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
align-items: center;
|
||||||
|
gap: 1rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.site-header__brand {
|
||||||
|
font-weight: 600;
|
||||||
|
font-size: 1.15rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.site-header__title {
|
||||||
|
flex: 1 1 auto;
|
||||||
|
font-size: 1.25rem;
|
||||||
|
color: rgba(244, 246, 251, 0.92);
|
||||||
|
}
|
||||||
|
|
||||||
|
.site-header__controls {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 0.75rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.data-mode-toggle {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 0.25rem;
|
||||||
|
font-size: 0.85rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.data-mode-toggle select {
|
||||||
|
border-radius: var(--radius-sm);
|
||||||
|
border: 1px solid var(--color-border);
|
||||||
|
background: var(--color-surface);
|
||||||
|
color: inherit;
|
||||||
|
padding: 0.25rem 0.5rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.data-mode-toggle small {
|
||||||
|
color: var(--color-text-muted);
|
||||||
|
}
|
||||||
|
|
||||||
|
.site-header__nav {
|
||||||
|
display: flex;
|
||||||
|
gap: 0.75rem;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
.site-header__nav a {
|
||||||
|
padding: 0.35rem 0.75rem;
|
||||||
|
border-radius: 999px;
|
||||||
|
transition: background 150ms ease;
|
||||||
|
outline: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.site-header__nav a:hover,
|
||||||
|
.site-header__nav a:focus {
|
||||||
|
background: rgba(125, 196, 255, 0.15);
|
||||||
|
}
|
||||||
|
|
||||||
|
.site-header__nav a:focus-visible {
|
||||||
|
box-shadow: 0 0 0 2px rgba(125, 196, 255, 0.7);
|
||||||
|
}
|
||||||
|
|
||||||
|
.page {
|
||||||
|
margin: 0 auto;
|
||||||
|
max-width: 1200px;
|
||||||
|
padding: 2rem 1.5rem 4rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
@media (max-width: 768px) {
|
||||||
|
.site-header__inner {
|
||||||
|
justify-content: space-between;
|
||||||
|
}
|
||||||
|
|
||||||
|
.site-header__controls {
|
||||||
|
width: 100%;
|
||||||
|
justify-content: flex-start;
|
||||||
|
}
|
||||||
|
|
||||||
|
.site-header__nav {
|
||||||
|
width: 100%;
|
||||||
|
justify-content: space-between;
|
||||||
|
}
|
||||||
|
|
||||||
|
.site-header__nav a {
|
||||||
|
flex: 1 1 auto;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.section-header {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 0.5rem;
|
||||||
|
margin-bottom: 1.5rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.addresses__table,
|
||||||
|
.blocks__table,
|
||||||
|
.transactions__table,
|
||||||
|
.receipts__table {
|
||||||
|
background: rgba(18, 22, 29, 0.85);
|
||||||
|
border-radius: 0.75rem;
|
||||||
|
overflow: hidden;
|
||||||
|
border: 1px solid rgba(125, 196, 255, 0.12);
|
||||||
|
}
|
||||||
|
|
||||||
|
.overview__grid {
|
||||||
|
display: grid;
|
||||||
|
gap: 1.5rem;
|
||||||
|
grid-template-columns: repeat(auto-fit, minmax(220px, 1fr));
|
||||||
|
}
|
||||||
|
|
||||||
|
.card {
|
||||||
|
background: rgba(18, 22, 29, 0.85);
|
||||||
|
border: 1px solid rgba(125, 196, 255, 0.12);
|
||||||
|
border-radius: 0.75rem;
|
||||||
|
padding: 1.25rem;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 0.5rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.stat-list {
|
||||||
|
list-style: none;
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.stat-list li + li {
|
||||||
|
margin-top: 0.35rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.addresses__search {
|
||||||
|
display: grid;
|
||||||
|
gap: 0.75rem;
|
||||||
|
margin-bottom: 1.5rem;
|
||||||
|
background: rgba(18, 22, 29, 0.7);
|
||||||
|
border-radius: 0.5rem;
|
||||||
|
padding: 1rem 1.25rem;
|
||||||
|
border: 1px solid rgba(125, 196, 255, 0.12);
|
||||||
|
}
|
||||||
|
|
||||||
|
.addresses__input-group {
|
||||||
|
display: flex;
|
||||||
|
gap: 0.75rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.addresses__input-group input,
|
||||||
|
.addresses__input-group button {
|
||||||
|
border-radius: 0.5rem;
|
||||||
|
border: 1px solid rgba(125, 196, 255, 0.25);
|
||||||
|
padding: 0.5rem 0.75rem;
|
||||||
|
background: rgba(12, 15, 20, 0.85);
|
||||||
|
color: inherit;
|
||||||
|
outline: none;
|
||||||
|
transition: border-color 150ms ease, box-shadow 150ms ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.addresses__input-group input:focus-visible {
|
||||||
|
border-color: rgba(125, 196, 255, 0.6);
|
||||||
|
box-shadow: 0 0 0 2px rgba(125, 196, 255, 0.3);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* NOTE(review): the search button permanently shows the "not-allowed" cursor —
   presumably because address lookup is not wired up yet. Confirm this is an
   intentional placeholder and remove once the search handler ships. */
.addresses__input-group button {
  cursor: not-allowed;
}
|
||||||
|
|
||||||
|
.receipts__controls {
|
||||||
|
display: grid;
|
||||||
|
gap: 0.75rem;
|
||||||
|
margin-bottom: 1.5rem;
|
||||||
|
background: rgba(18, 22, 29, 0.7);
|
||||||
|
border-radius: 0.5rem;
|
||||||
|
padding: 1rem 1.25rem;
|
||||||
|
border: 1px solid rgba(125, 196, 255, 0.12);
|
||||||
|
}
|
||||||
|
|
||||||
|
.receipts__input-group {
|
||||||
|
display: flex;
|
||||||
|
gap: 0.75rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
.receipts__input-group input,
|
||||||
|
.receipts__input-group button {
|
||||||
|
border-radius: 0.5rem;
|
||||||
|
border: 1px solid rgba(125, 196, 255, 0.25);
|
||||||
|
padding: 0.5rem 0.75rem;
|
||||||
|
background: rgba(12, 15, 20, 0.85);
|
||||||
|
color: inherit;
|
||||||
|
outline: none;
|
||||||
|
transition: border-color 150ms ease, box-shadow 150ms ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.receipts__input-group input:focus-visible {
|
||||||
|
border-color: rgba(125, 196, 255, 0.6);
|
||||||
|
box-shadow: 0 0 0 2px rgba(125, 196, 255, 0.3);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* NOTE(review): mirrors .addresses__input-group button — "not-allowed" cursor
   on an apparently live control. Confirm the receipts filter is intentionally
   disabled for now. */
.receipts__input-group button {
  cursor: not-allowed;
}
|
||||||
|
|
||||||
|
.site-footer {
|
||||||
|
margin: 0;
|
||||||
|
border-top: 1px solid rgba(125, 196, 255, 0.2);
|
||||||
|
background: rgba(22, 27, 34, 0.95);
|
||||||
|
}
|
||||||
|
|
||||||
|
.site-footer__inner {
|
||||||
|
margin: 0 auto;
|
||||||
|
max-width: 1200px;
|
||||||
|
padding: 1.25rem 1.5rem;
|
||||||
|
color: rgba(244, 246, 251, 0.7);
|
||||||
|
font-size: 0.9rem;
|
||||||
|
}
|
||||||
38
apps/explorer-web/public/css/theme.css
Normal file
38
apps/explorer-web/public/css/theme.css
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
:root {
|
||||||
|
color-scheme: dark;
|
||||||
|
--font-base: "Inter", system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif;
|
||||||
|
--font-mono: "Fira Code", "Source Code Pro", Menlo, Consolas, monospace;
|
||||||
|
|
||||||
|
--color-bg: #0b0d10;
|
||||||
|
--color-surface: rgba(18, 22, 29, 0.85);
|
||||||
|
--color-surface-muted: rgba(18, 22, 29, 0.7);
|
||||||
|
--color-border: rgba(125, 196, 255, 0.12);
|
||||||
|
--color-border-strong: rgba(125, 196, 255, 0.2);
|
||||||
|
--color-text-primary: #f4f6fb;
|
||||||
|
--color-text-secondary: rgba(244, 246, 251, 0.7);
|
||||||
|
--color-text-muted: rgba(244, 246, 251, 0.6);
|
||||||
|
--color-primary: #7dc4ff;
|
||||||
|
--color-primary-hover: rgba(125, 196, 255, 0.15);
|
||||||
|
--color-focus-ring: rgba(125, 196, 255, 0.7);
|
||||||
|
--color-placeholder: rgba(244, 246, 251, 0.7);
|
||||||
|
--color-table-even: rgba(255, 255, 255, 0.02);
|
||||||
|
--color-table-head: rgba(255, 255, 255, 0.06);
|
||||||
|
--color-shadow-soft: rgba(0, 0, 0, 0.35);
|
||||||
|
|
||||||
|
--space-xs: 0.35rem;
|
||||||
|
--space-sm: 0.5rem;
|
||||||
|
--space-md: 0.75rem;
|
||||||
|
--space-lg: 1.25rem;
|
||||||
|
--space-xl: 2rem;
|
||||||
|
--radius-sm: 0.375rem;
|
||||||
|
--radius-md: 0.5rem;
|
||||||
|
--radius-lg: 0.75rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
:root[data-mode="live"] {
|
||||||
|
--color-primary: #8ef9d0;
|
||||||
|
--color-primary-hover: rgba(142, 249, 208, 0.18);
|
||||||
|
--color-border: rgba(142, 249, 208, 0.12);
|
||||||
|
--color-border-strong: rgba(142, 249, 208, 0.24);
|
||||||
|
--color-focus-ring: rgba(142, 249, 208, 0.65);
|
||||||
|
}
|
||||||
14
apps/explorer-web/public/mock/addresses.json
Normal file
14
apps/explorer-web/public/mock/addresses.json
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
[
|
||||||
|
{
|
||||||
|
"address": "0xfeedfacefeedfacefeedfacefeedfacefeedface",
|
||||||
|
"balance": "1450.25 AIT",
|
||||||
|
"txCount": 42,
|
||||||
|
"lastActive": "2025-09-27T01:48:00Z"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"address": "0xcafebabecafebabecafebabecafebabecafebabe",
|
||||||
|
"balance": "312.00 AIT",
|
||||||
|
"txCount": 9,
|
||||||
|
"lastActive": "2025-09-27T01:25:34Z"
|
||||||
|
}
|
||||||
|
]
|
||||||
23
apps/explorer-web/public/mock/blocks.json
Normal file
23
apps/explorer-web/public/mock/blocks.json
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
[
|
||||||
|
{
|
||||||
|
"height": 12045,
|
||||||
|
"hash": "0x7a3f5bf5c3b8ed5d6f77a42b8ab9a421e91e23f4d2a3f6a1d4b5c6d7e8f90123",
|
||||||
|
"timestamp": "2025-09-27T01:58:12Z",
|
||||||
|
"txCount": 8,
|
||||||
|
"proposer": "miner-alpha"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"height": 12044,
|
||||||
|
"hash": "0x5dd4e7a2b88c56f4cbb8f6e21d332e2f1a765e8d9c0b12a34567890abcdef012",
|
||||||
|
"timestamp": "2025-09-27T01:56:43Z",
|
||||||
|
"txCount": 11,
|
||||||
|
"proposer": "miner-beta"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"height": 12043,
|
||||||
|
"hash": "0x1b9d2c3f4e5a67890b12c34d56e78f90a1b2c3d4e5f60718293a4b5c6d7e8f90",
|
||||||
|
"timestamp": "2025-09-27T01:54:16Z",
|
||||||
|
"txCount": 4,
|
||||||
|
"proposer": "miner-gamma"
|
||||||
|
}
|
||||||
|
]
|
||||||
18
apps/explorer-web/public/mock/receipts.json
Normal file
18
apps/explorer-web/public/mock/receipts.json
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
[
|
||||||
|
{
|
||||||
|
"jobId": "job-0001",
|
||||||
|
"receiptId": "rcpt-123",
|
||||||
|
"miner": "miner-alpha",
|
||||||
|
"coordinator": "coordinator-001",
|
||||||
|
"issuedAt": "2025-09-27T01:52:22Z",
|
||||||
|
"status": "Attested"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"jobId": "job-0002",
|
||||||
|
"receiptId": "rcpt-124",
|
||||||
|
"miner": "miner-beta",
|
||||||
|
"coordinator": "coordinator-001",
|
||||||
|
"issuedAt": "2025-09-27T01:45:18Z",
|
||||||
|
"status": "Pending"
|
||||||
|
}
|
||||||
|
]
|
||||||
18
apps/explorer-web/public/mock/transactions.json
Normal file
18
apps/explorer-web/public/mock/transactions.json
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
[
|
||||||
|
{
|
||||||
|
"hash": "0xabc1230000000000000000000000000000000000000000000000000000000001",
|
||||||
|
"block": 12045,
|
||||||
|
"from": "0xfeedfacefeedfacefeedfacefeedfacefeedface",
|
||||||
|
"to": "0xcafebabecafebabecafebabecafebabecafebabe",
|
||||||
|
"value": "12.5 AIT",
|
||||||
|
"status": "Succeeded"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"hash": "0xabc1230000000000000000000000000000000000000000000000000000000002",
|
||||||
|
"block": 12044,
|
||||||
|
"from": "0xdeadc0dedeadc0dedeadc0dedeadc0dedeadc0de",
|
||||||
|
"to": "0x8badf00d8badf00d8badf00d8badf00d8badf00d",
|
||||||
|
"value": "3.1 AIT",
|
||||||
|
"status": "Pending"
|
||||||
|
}
|
||||||
|
]
|
||||||
33
apps/explorer-web/src/components/dataModeToggle.js
Normal file
33
apps/explorer-web/src/components/dataModeToggle.js
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.initDataModeToggle = initDataModeToggle;
|
||||||
|
var config_1 = require("../config");
|
||||||
|
var mockData_1 = require("../lib/mockData");
|
||||||
|
var LABELS = {
|
||||||
|
mock: "Mock Data",
|
||||||
|
live: "Live API",
|
||||||
|
};
|
||||||
|
function initDataModeToggle(onChange) {
|
||||||
|
var container = document.querySelector("[data-role='data-mode-toggle']");
|
||||||
|
if (!container) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
container.innerHTML = renderControls((0, mockData_1.getDataMode)());
|
||||||
|
var select = container.querySelector("select[data-mode-select]");
|
||||||
|
if (!select) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
select.value = (0, mockData_1.getDataMode)();
|
||||||
|
select.addEventListener("change", function (event) {
|
||||||
|
var value = event.target.value;
|
||||||
|
(0, mockData_1.setDataMode)(value);
|
||||||
|
document.documentElement.dataset.mode = value;
|
||||||
|
onChange();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
function renderControls(mode) {
|
||||||
|
var options = Object.keys(LABELS)
|
||||||
|
.map(function (id) { return "<option value=\"".concat(id, "\" ").concat(id === mode ? "selected" : "", ">").concat(LABELS[id], "</option>"); })
|
||||||
|
.join("");
|
||||||
|
return "\n <label class=\"data-mode-toggle\">\n <span>Data Mode</span>\n <select data-mode-select>\n ".concat(options, "\n </select>\n <small>").concat(mode === "mock" ? "Static JSON samples" : "Coordinator API (".concat(config_1.CONFIG.apiBaseUrl, ")"), "</small>\n </label>\n ");
|
||||||
|
}
|
||||||
45
apps/explorer-web/src/components/dataModeToggle.ts
Normal file
45
apps/explorer-web/src/components/dataModeToggle.ts
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
import { CONFIG, type DataMode } from "../config";
|
||||||
|
import { getDataMode, setDataMode } from "../lib/mockData";
|
||||||
|
|
||||||
|
// Display labels for each data mode; Record<DataMode, ...> forces this map to
// stay in sync with the DataMode union at compile time.
const LABELS: Record<DataMode, string> = {
  mock: "Mock Data",
  live: "Live API",
};
|
||||||
|
|
||||||
|
export function initDataModeToggle(onChange: () => void): void {
|
||||||
|
const container = document.querySelector<HTMLDivElement>("[data-role='data-mode-toggle']");
|
||||||
|
if (!container) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
container.innerHTML = renderControls(getDataMode());
|
||||||
|
|
||||||
|
const select = container.querySelector<HTMLSelectElement>("select[data-mode-select]");
|
||||||
|
if (!select) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
select.value = getDataMode();
|
||||||
|
select.addEventListener("change", (event) => {
|
||||||
|
const value = (event.target as HTMLSelectElement).value as DataMode;
|
||||||
|
setDataMode(value);
|
||||||
|
document.documentElement.dataset.mode = value;
|
||||||
|
onChange();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build the toggle's inner HTML: one <option> per LABELS entry, with the
// option matching `mode` pre-selected so re-renders preserve the choice, plus
// a <small> hint naming the active data source (and API base URL in live mode).
function renderControls(mode: DataMode): string {
  const options = (Object.keys(LABELS) as DataMode[])
    .map((id) => `<option value="${id}" ${id === mode ? "selected" : ""}>${LABELS[id]}</option>`)
    .join("");

  return `
    <label class="data-mode-toggle">
      <span>Data Mode</span>
      <select data-mode-select>
        ${options}
      </select>
      <small>${mode === "mock" ? "Static JSON samples" : `Coordinator API (${CONFIG.apiBaseUrl})`}</small>
    </label>
  `;
}
|
||||||
7
apps/explorer-web/src/components/siteFooter.js
Normal file
7
apps/explorer-web/src/components/siteFooter.js
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.siteFooter = siteFooter;
|
||||||
|
function siteFooter() {
|
||||||
|
var year = new Date().getFullYear();
|
||||||
|
return "\n <footer class=\"site-footer\">\n <div class=\"site-footer__inner\">\n <p>© ".concat(year, " AITBC Foundation. Explorer UI under active development.</p>\n </div>\n </footer>\n ");
|
||||||
|
}
|
||||||
10
apps/explorer-web/src/components/siteFooter.ts
Normal file
10
apps/explorer-web/src/components/siteFooter.ts
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
/**
 * Render the static site footer markup, stamping the current copyright year
 * at call time.
 */
export function siteFooter(): string {
  const year = new Date().getFullYear();
  return `
    <footer class="site-footer">
      <div class="site-footer__inner">
        <p>© ${year} AITBC Foundation. Explorer UI under active development.</p>
      </div>
    </footer>
  `;
}
|
||||||
6
apps/explorer-web/src/components/siteHeader.js
Normal file
6
apps/explorer-web/src/components/siteHeader.js
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.siteHeader = siteHeader;
|
||||||
|
function siteHeader(title) {
|
||||||
|
return "\n <header class=\"site-header\">\n <div class=\"site-header__inner\">\n <a class=\"site-header__brand\" href=\"/\">AITBC Explorer</a>\n <h1 class=\"site-header__title\">".concat(title, "</h1>\n <div class=\"site-header__controls\">\n <div data-role=\"data-mode-toggle\"></div>\n </div>\n <nav class=\"site-header__nav\">\n <a href=\"/\">Overview</a>\n <a href=\"/blocks\">Blocks</a>\n <a href=\"/transactions\">Transactions</a>\n <a href=\"/addresses\">Addresses</a>\n <a href=\"/receipts\">Receipts</a>\n </nav>\n </div>\n </header>\n ");
|
||||||
|
}
|
||||||
20
apps/explorer-web/src/components/siteHeader.ts
Normal file
20
apps/explorer-web/src/components/siteHeader.ts
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
/**
 * Render the sticky site header: brand link, page title, the mount point the
 * data-mode toggle attaches to, and the primary navigation.
 *
 * NOTE(review): `title` is interpolated without HTML escaping — confirm
 * callers only ever pass trusted, static strings.
 */
export function siteHeader(title: string): string {
  return `
    <header class="site-header">
      <div class="site-header__inner">
        <a class="site-header__brand" href="/">AITBC Explorer</a>
        <h1 class="site-header__title">${title}</h1>
        <div class="site-header__controls">
          <div data-role="data-mode-toggle"></div>
        </div>
        <nav class="site-header__nav">
          <a href="/">Overview</a>
          <a href="/blocks">Blocks</a>
          <a href="/transactions">Transactions</a>
          <a href="/addresses">Addresses</a>
          <a href="/receipts">Receipts</a>
        </nav>
      </div>
    </header>
  `;
}
|
||||||
10
apps/explorer-web/src/config.js
Normal file
10
apps/explorer-web/src/config.js
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
"use strict";
|
||||||
|
var _a, _b, _c, _d;
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.CONFIG = void 0;
|
||||||
|
exports.CONFIG = {
|
||||||
|
// Toggle between "mock" (static JSON under public/mock/) and "live" coordinator APIs.
|
||||||
|
dataMode: (_b = (_a = import.meta.env) === null || _a === void 0 ? void 0 : _a.VITE_DATA_MODE) !== null && _b !== void 0 ? _b : "mock",
|
||||||
|
mockBasePath: "/mock",
|
||||||
|
apiBaseUrl: (_d = (_c = import.meta.env) === null || _c === void 0 ? void 0 : _c.VITE_COORDINATOR_API) !== null && _d !== void 0 ? _d : "http://localhost:8000",
|
||||||
|
};
|
||||||
14
apps/explorer-web/src/config.ts
Normal file
14
apps/explorer-web/src/config.ts
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
export type DataMode = "mock" | "live";
|
||||||
|
|
||||||
|
export interface ExplorerConfig {
|
||||||
|
dataMode: DataMode;
|
||||||
|
mockBasePath: string;
|
||||||
|
apiBaseUrl: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const CONFIG: ExplorerConfig = {
|
||||||
|
// Toggle between "mock" (static JSON under public/mock/) and "live" coordinator APIs.
|
||||||
|
dataMode: (import.meta.env?.VITE_DATA_MODE as DataMode) ?? "mock",
|
||||||
|
mockBasePath: "/mock",
|
||||||
|
apiBaseUrl: import.meta.env?.VITE_COORDINATOR_API ?? "http://localhost:8000",
|
||||||
|
};
|
||||||
207
apps/explorer-web/src/lib/mockData.js
Normal file
207
apps/explorer-web/src/lib/mockData.js
Normal file
@ -0,0 +1,207 @@
|
|||||||
|
"use strict";
|
||||||
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||||
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||||
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||||||
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||||
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||||
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||||
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||||
|
});
|
||||||
|
};
|
||||||
|
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||||
|
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
|
||||||
|
return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||||
|
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||||
|
function step(op) {
|
||||||
|
if (f) throw new TypeError("Generator is already executing.");
|
||||||
|
while (g && (g = 0, op[0] && (_ = 0)), _) try {
|
||||||
|
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||||
|
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||||
|
switch (op[0]) {
|
||||||
|
case 0: case 1: t = op; break;
|
||||||
|
case 4: _.label++; return { value: op[1], done: false };
|
||||||
|
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||||
|
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||||
|
default:
|
||||||
|
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||||
|
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||||
|
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||||
|
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||||
|
if (t[2]) _.ops.pop();
|
||||||
|
_.trys.pop(); continue;
|
||||||
|
}
|
||||||
|
op = body.call(thisArg, _);
|
||||||
|
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||||
|
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||||
|
}
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.getDataMode = getDataMode;
|
||||||
|
exports.setDataMode = setDataMode;
|
||||||
|
exports.fetchBlocks = fetchBlocks;
|
||||||
|
exports.fetchTransactions = fetchTransactions;
|
||||||
|
exports.fetchAddresses = fetchAddresses;
|
||||||
|
exports.fetchReceipts = fetchReceipts;
|
||||||
|
var config_1 = require("../config");
|
||||||
|
var currentMode = config_1.CONFIG.dataMode;
|
||||||
|
function getDataMode() {
|
||||||
|
return currentMode;
|
||||||
|
}
|
||||||
|
function setDataMode(mode) {
|
||||||
|
currentMode = mode;
|
||||||
|
}
|
||||||
|
function fetchBlocks() {
|
||||||
|
return __awaiter(this, void 0, void 0, function () {
|
||||||
|
var data, response, data, error_1;
|
||||||
|
return __generator(this, function (_a) {
|
||||||
|
switch (_a.label) {
|
||||||
|
case 0:
|
||||||
|
if (!(getDataMode() === "mock")) return [3 /*break*/, 2];
|
||||||
|
return [4 /*yield*/, fetchMock("blocks")];
|
||||||
|
case 1:
|
||||||
|
data = _a.sent();
|
||||||
|
return [2 /*return*/, data.items];
|
||||||
|
case 2:
|
||||||
|
_a.trys.push([2, 5, , 6]);
|
||||||
|
return [4 /*yield*/, fetch("".concat(config_1.CONFIG.apiBaseUrl, "/v1/blocks"))];
|
||||||
|
case 3:
|
||||||
|
response = _a.sent();
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error("Failed to fetch blocks: ".concat(response.status));
|
||||||
|
}
|
||||||
|
return [4 /*yield*/, response.json()];
|
||||||
|
case 4:
|
||||||
|
data = (_a.sent());
|
||||||
|
return [2 /*return*/, data.items];
|
||||||
|
case 5:
|
||||||
|
error_1 = _a.sent();
|
||||||
|
console.warn("[Explorer] Failed to fetch live block data", error_1);
|
||||||
|
return [2 /*return*/, []];
|
||||||
|
case 6: return [2 /*return*/];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
function fetchTransactions() {
|
||||||
|
return __awaiter(this, void 0, void 0, function () {
|
||||||
|
var data, response, data, error_2;
|
||||||
|
return __generator(this, function (_a) {
|
||||||
|
switch (_a.label) {
|
||||||
|
case 0:
|
||||||
|
if (!(getDataMode() === "mock")) return [3 /*break*/, 2];
|
||||||
|
return [4 /*yield*/, fetchMock("transactions")];
|
||||||
|
case 1:
|
||||||
|
data = _a.sent();
|
||||||
|
return [2 /*return*/, data.items];
|
||||||
|
case 2:
|
||||||
|
_a.trys.push([2, 5, , 6]);
|
||||||
|
return [4 /*yield*/, fetch("".concat(config_1.CONFIG.apiBaseUrl, "/v1/transactions"))];
|
||||||
|
case 3:
|
||||||
|
response = _a.sent();
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error("Failed to fetch transactions: ".concat(response.status));
|
||||||
|
}
|
||||||
|
return [4 /*yield*/, response.json()];
|
||||||
|
case 4:
|
||||||
|
data = (_a.sent());
|
||||||
|
return [2 /*return*/, data.items];
|
||||||
|
case 5:
|
||||||
|
error_2 = _a.sent();
|
||||||
|
console.warn("[Explorer] Failed to fetch live transaction data", error_2);
|
||||||
|
return [2 /*return*/, []];
|
||||||
|
case 6: return [2 /*return*/];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
function fetchAddresses() {
|
||||||
|
return __awaiter(this, void 0, void 0, function () {
|
||||||
|
var data, response, data, error_3;
|
||||||
|
return __generator(this, function (_a) {
|
||||||
|
switch (_a.label) {
|
||||||
|
case 0:
|
||||||
|
if (!(getDataMode() === "mock")) return [3 /*break*/, 2];
|
||||||
|
return [4 /*yield*/, fetchMock("addresses")];
|
||||||
|
case 1:
|
||||||
|
data = _a.sent();
|
||||||
|
return [2 /*return*/, Array.isArray(data) ? data : [data]];
|
||||||
|
case 2:
|
||||||
|
_a.trys.push([2, 5, , 6]);
|
||||||
|
return [4 /*yield*/, fetch("".concat(config_1.CONFIG.apiBaseUrl, "/v1/addresses"))];
|
||||||
|
case 3:
|
||||||
|
response = _a.sent();
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error("Failed to fetch addresses: ".concat(response.status));
|
||||||
|
}
|
||||||
|
return [4 /*yield*/, response.json()];
|
||||||
|
case 4:
|
||||||
|
data = (_a.sent());
|
||||||
|
return [2 /*return*/, Array.isArray(data) ? data : data.items];
|
||||||
|
case 5:
|
||||||
|
error_3 = _a.sent();
|
||||||
|
console.warn("[Explorer] Failed to fetch live address data", error_3);
|
||||||
|
return [2 /*return*/, []];
|
||||||
|
case 6: return [2 /*return*/];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
function fetchReceipts() {
|
||||||
|
return __awaiter(this, void 0, void 0, function () {
|
||||||
|
var data, response, data, error_4;
|
||||||
|
return __generator(this, function (_a) {
|
||||||
|
switch (_a.label) {
|
||||||
|
case 0:
|
||||||
|
if (!(getDataMode() === "mock")) return [3 /*break*/, 2];
|
||||||
|
return [4 /*yield*/, fetchMock("receipts")];
|
||||||
|
case 1:
|
||||||
|
data = _a.sent();
|
||||||
|
return [2 /*return*/, data.items];
|
||||||
|
case 2:
|
||||||
|
_a.trys.push([2, 5, , 6]);
|
||||||
|
return [4 /*yield*/, fetch("".concat(config_1.CONFIG.apiBaseUrl, "/v1/receipts"))];
|
||||||
|
case 3:
|
||||||
|
response = _a.sent();
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error("Failed to fetch receipts: ".concat(response.status));
|
||||||
|
}
|
||||||
|
return [4 /*yield*/, response.json()];
|
||||||
|
case 4:
|
||||||
|
data = (_a.sent());
|
||||||
|
return [2 /*return*/, data.items];
|
||||||
|
case 5:
|
||||||
|
error_4 = _a.sent();
|
||||||
|
console.warn("[Explorer] Failed to fetch live receipt data", error_4);
|
||||||
|
return [2 /*return*/, []];
|
||||||
|
case 6: return [2 /*return*/];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
function fetchMock(resource) {
|
||||||
|
return __awaiter(this, void 0, void 0, function () {
|
||||||
|
var url, response, error_5;
|
||||||
|
return __generator(this, function (_a) {
|
||||||
|
switch (_a.label) {
|
||||||
|
case 0:
|
||||||
|
url = "".concat(config_1.CONFIG.mockBasePath, "/").concat(resource, ".json");
|
||||||
|
_a.label = 1;
|
||||||
|
case 1:
|
||||||
|
_a.trys.push([1, 4, , 5]);
|
||||||
|
return [4 /*yield*/, fetch(url)];
|
||||||
|
case 2:
|
||||||
|
response = _a.sent();
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error("Request failed with status ".concat(response.status));
|
||||||
|
}
|
||||||
|
return [4 /*yield*/, response.json()];
|
||||||
|
case 3: return [2 /*return*/, (_a.sent())];
|
||||||
|
case 4:
|
||||||
|
error_5 = _a.sent();
|
||||||
|
console.warn("[Explorer] Failed to fetch mock data from ".concat(url), error_5);
|
||||||
|
return [2 /*return*/, []];
|
||||||
|
case 5: return [2 /*return*/];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
112
apps/explorer-web/src/lib/mockData.ts
Normal file
112
apps/explorer-web/src/lib/mockData.ts
Normal file
@ -0,0 +1,112 @@
|
|||||||
|
import { CONFIG, type DataMode } from "../config";
|
||||||
|
import type {
|
||||||
|
BlockListResponse,
|
||||||
|
TransactionListResponse,
|
||||||
|
AddressDetailResponse,
|
||||||
|
ReceiptListResponse,
|
||||||
|
BlockSummary,
|
||||||
|
TransactionSummary,
|
||||||
|
AddressSummary,
|
||||||
|
ReceiptSummary,
|
||||||
|
} from "./models.ts";
|
||||||
|
|
||||||
|
// Module-level active data source, seeded from build-time config. Mutable so
// the UI toggle can flip between mock JSON and the live coordinator at runtime.
let currentMode: DataMode = CONFIG.dataMode;

// Return the currently active data mode ("mock" or "live").
export function getDataMode(): DataMode {
  return currentMode;
}

// Switch the active data mode; takes effect on the next fetch* call.
export function setDataMode(mode: DataMode): void {
  currentMode = mode;
}
||||||
|
|
||||||
|
export async function fetchBlocks(): Promise<BlockSummary[]> {
|
||||||
|
if (getDataMode() === "mock") {
|
||||||
|
const data = await fetchMock<BlockListResponse>("blocks");
|
||||||
|
return data.items;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(`${CONFIG.apiBaseUrl}/v1/blocks`);
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`Failed to fetch blocks: ${response.status}`);
|
||||||
|
}
|
||||||
|
const data = (await response.json()) as BlockListResponse;
|
||||||
|
return data.items;
|
||||||
|
} catch (error) {
|
||||||
|
console.warn("[Explorer] Failed to fetch live block data", error);
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function fetchTransactions(): Promise<TransactionSummary[]> {
|
||||||
|
if (getDataMode() === "mock") {
|
||||||
|
const data = await fetchMock<TransactionListResponse>("transactions");
|
||||||
|
return data.items;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(`${CONFIG.apiBaseUrl}/v1/transactions`);
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`Failed to fetch transactions: ${response.status}`);
|
||||||
|
}
|
||||||
|
const data = (await response.json()) as TransactionListResponse;
|
||||||
|
return data.items;
|
||||||
|
} catch (error) {
|
||||||
|
console.warn("[Explorer] Failed to fetch live transaction data", error);
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function fetchAddresses(): Promise<AddressSummary[]> {
|
||||||
|
if (getDataMode() === "mock") {
|
||||||
|
const data = await fetchMock<AddressDetailResponse | AddressDetailResponse[]>("addresses");
|
||||||
|
return Array.isArray(data) ? data : [data];
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(`${CONFIG.apiBaseUrl}/v1/addresses`);
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`Failed to fetch addresses: ${response.status}`);
|
||||||
|
}
|
||||||
|
const data = (await response.json()) as { items: AddressDetailResponse[] } | AddressDetailResponse[];
|
||||||
|
return Array.isArray(data) ? data : data.items;
|
||||||
|
} catch (error) {
|
||||||
|
console.warn("[Explorer] Failed to fetch live address data", error);
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function fetchReceipts(): Promise<ReceiptSummary[]> {
|
||||||
|
if (getDataMode() === "mock") {
|
||||||
|
const data = await fetchMock<ReceiptListResponse>("receipts");
|
||||||
|
return data.items;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(`${CONFIG.apiBaseUrl}/v1/receipts`);
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`Failed to fetch receipts: ${response.status}`);
|
||||||
|
}
|
||||||
|
const data = (await response.json()) as ReceiptListResponse;
|
||||||
|
return data.items;
|
||||||
|
} catch (error) {
|
||||||
|
console.warn("[Explorer] Failed to fetch live receipt data", error);
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function fetchMock<T>(resource: string): Promise<T> {
|
||||||
|
const url = `${CONFIG.mockBasePath}/${resource}.json`;
|
||||||
|
try {
|
||||||
|
const response = await fetch(url);
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`Request failed with status ${response.status}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (await response.json()) as T;
|
||||||
|
} catch (error) {
|
||||||
|
console.warn(`[Explorer] Failed to fetch mock data from ${url}`, error);
|
||||||
|
return [] as unknown as T;
|
||||||
|
}
|
||||||
|
}
|
||||||
2
apps/explorer-web/src/lib/models.js
Normal file
2
apps/explorer-web/src/lib/models.js
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
57
apps/explorer-web/src/lib/models.ts
Normal file
57
apps/explorer-web/src/lib/models.ts
Normal file
@ -0,0 +1,57 @@
|
|||||||
|
export interface BlockSummary {
|
||||||
|
height: number;
|
||||||
|
hash: string;
|
||||||
|
timestamp: string;
|
||||||
|
txCount: number;
|
||||||
|
proposer: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BlockListResponse {
|
||||||
|
items: BlockSummary[];
|
||||||
|
next_offset?: number | string | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TransactionSummary {
|
||||||
|
hash: string;
|
||||||
|
block: number | string;
|
||||||
|
from: string;
|
||||||
|
to: string | null;
|
||||||
|
value: string;
|
||||||
|
status: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TransactionListResponse {
|
||||||
|
items: TransactionSummary[];
|
||||||
|
next_offset?: number | string | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AddressSummary {
|
||||||
|
address: string;
|
||||||
|
balance: string;
|
||||||
|
txCount: number;
|
||||||
|
lastActive: string;
|
||||||
|
recentTransactions?: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AddressDetailResponse extends AddressSummary {}
|
||||||
|
export interface AddressListResponse {
|
||||||
|
items: AddressSummary[];
|
||||||
|
next_offset?: number | string | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ReceiptSummary {
|
||||||
|
receiptId: string;
|
||||||
|
miner: string;
|
||||||
|
coordinator: string;
|
||||||
|
issuedAt: string;
|
||||||
|
status: string;
|
||||||
|
payload?: {
|
||||||
|
minerSignature?: string;
|
||||||
|
coordinatorSignature?: string;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ReceiptListResponse {
|
||||||
|
jobId: string;
|
||||||
|
items: ReceiptSummary[];
|
||||||
|
}
|
||||||
63
apps/explorer-web/src/main.js
Normal file
63
apps/explorer-web/src/main.js
Normal file
@ -0,0 +1,63 @@
|
|||||||
|
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
require("../public/css/theme.css");
|
||||||
|
require("../public/css/base.css");
|
||||||
|
require("../public/css/layout.css");
|
||||||
|
var siteHeader_1 = require("./components/siteHeader");
|
||||||
|
var siteFooter_1 = require("./components/siteFooter");
|
||||||
|
var overview_1 = require("./pages/overview");
|
||||||
|
var blocks_1 = require("./pages/blocks");
|
||||||
|
var transactions_1 = require("./pages/transactions");
|
||||||
|
var addresses_1 = require("./pages/addresses");
|
||||||
|
var receipts_1 = require("./pages/receipts");
|
||||||
|
var dataModeToggle_1 = require("./components/dataModeToggle");
|
||||||
|
var mockData_1 = require("./lib/mockData");
|
||||||
|
var overviewConfig = {
|
||||||
|
title: overview_1.overviewTitle,
|
||||||
|
render: overview_1.renderOverviewPage,
|
||||||
|
init: overview_1.initOverviewPage,
|
||||||
|
};
|
||||||
|
var routes = {
|
||||||
|
"/": overviewConfig,
|
||||||
|
"/index.html": overviewConfig,
|
||||||
|
"/blocks": {
|
||||||
|
title: blocks_1.blocksTitle,
|
||||||
|
render: blocks_1.renderBlocksPage,
|
||||||
|
init: blocks_1.initBlocksPage,
|
||||||
|
},
|
||||||
|
"/transactions": {
|
||||||
|
title: transactions_1.transactionsTitle,
|
||||||
|
render: transactions_1.renderTransactionsPage,
|
||||||
|
init: transactions_1.initTransactionsPage,
|
||||||
|
},
|
||||||
|
"/addresses": {
|
||||||
|
title: addresses_1.addressesTitle,
|
||||||
|
render: addresses_1.renderAddressesPage,
|
||||||
|
init: addresses_1.initAddressesPage,
|
||||||
|
},
|
||||||
|
"/receipts": {
|
||||||
|
title: receipts_1.receiptsTitle,
|
||||||
|
render: receipts_1.renderReceiptsPage,
|
||||||
|
init: receipts_1.initReceiptsPage,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
function render() {
|
||||||
|
var _a, _b, _c;
|
||||||
|
var root = document.querySelector("#app");
|
||||||
|
if (!root) {
|
||||||
|
console.warn("[Explorer] Missing #app root element");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
document.documentElement.dataset.mode = (0, mockData_1.getDataMode)();
|
||||||
|
var currentPath = window.location.pathname.replace(/\/$/, "");
|
||||||
|
var normalizedPath = currentPath === "" ? "/" : currentPath;
|
||||||
|
var page = (_a = routes[normalizedPath]) !== null && _a !== void 0 ? _a : null;
|
||||||
|
root.innerHTML = "\n ".concat((0, siteHeader_1.siteHeader)((_b = page === null || page === void 0 ? void 0 : page.title) !== null && _b !== void 0 ? _b : "Explorer"), "\n <main class=\"page\">").concat((page !== null && page !== void 0 ? page : notFoundPageConfig).render(), "</main>\n ").concat((0, siteFooter_1.siteFooter)(), "\n ");
|
||||||
|
(0, dataModeToggle_1.initDataModeToggle)(render);
|
||||||
|
void ((_c = page === null || page === void 0 ? void 0 : page.init) === null || _c === void 0 ? void 0 : _c.call(page));
|
||||||
|
}
|
||||||
|
var notFoundPageConfig = {
|
||||||
|
title: "Not Found",
|
||||||
|
render: function () { return "\n <section class=\"not-found\">\n <h2>Page Not Found</h2>\n <p>The requested view is not available yet.</p>\n </section>\n "; },
|
||||||
|
};
|
||||||
|
document.addEventListener("DOMContentLoaded", render);
|
||||||
84
apps/explorer-web/src/main.ts
Normal file
84
apps/explorer-web/src/main.ts
Normal file
@ -0,0 +1,84 @@
|
|||||||
|
import "../public/css/theme.css";
|
||||||
|
import "../public/css/base.css";
|
||||||
|
import "../public/css/layout.css";
|
||||||
|
import { siteHeader } from "./components/siteHeader";
|
||||||
|
import { siteFooter } from "./components/siteFooter";
|
||||||
|
import { overviewTitle, renderOverviewPage, initOverviewPage } from "./pages/overview";
|
||||||
|
import { blocksTitle, renderBlocksPage, initBlocksPage } from "./pages/blocks";
|
||||||
|
import { transactionsTitle, renderTransactionsPage, initTransactionsPage } from "./pages/transactions";
|
||||||
|
import { addressesTitle, renderAddressesPage, initAddressesPage } from "./pages/addresses";
|
||||||
|
import { receiptsTitle, renderReceiptsPage, initReceiptsPage } from "./pages/receipts";
|
||||||
|
import { initDataModeToggle } from "./components/dataModeToggle";
|
||||||
|
import { getDataMode } from "./lib/mockData";
|
||||||
|
|
||||||
|
type PageConfig = {
|
||||||
|
title: string;
|
||||||
|
render: () => string;
|
||||||
|
init?: () => void | Promise<void>;
|
||||||
|
};
|
||||||
|
|
||||||
|
const overviewConfig: PageConfig = {
|
||||||
|
title: overviewTitle,
|
||||||
|
render: renderOverviewPage,
|
||||||
|
init: initOverviewPage,
|
||||||
|
};
|
||||||
|
|
||||||
|
const routes: Record<string, PageConfig> = {
|
||||||
|
"/": overviewConfig,
|
||||||
|
"/index.html": overviewConfig,
|
||||||
|
"/blocks": {
|
||||||
|
title: blocksTitle,
|
||||||
|
render: renderBlocksPage,
|
||||||
|
init: initBlocksPage,
|
||||||
|
},
|
||||||
|
"/transactions": {
|
||||||
|
title: transactionsTitle,
|
||||||
|
render: renderTransactionsPage,
|
||||||
|
init: initTransactionsPage,
|
||||||
|
},
|
||||||
|
"/addresses": {
|
||||||
|
title: addressesTitle,
|
||||||
|
render: renderAddressesPage,
|
||||||
|
init: initAddressesPage,
|
||||||
|
},
|
||||||
|
"/receipts": {
|
||||||
|
title: receiptsTitle,
|
||||||
|
render: renderReceiptsPage,
|
||||||
|
init: initReceiptsPage,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
function render(): void {
|
||||||
|
const root = document.querySelector<HTMLDivElement>("#app");
|
||||||
|
if (!root) {
|
||||||
|
console.warn("[Explorer] Missing #app root element");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
document.documentElement.dataset.mode = getDataMode();
|
||||||
|
|
||||||
|
const currentPath = window.location.pathname.replace(/\/$/, "");
|
||||||
|
const normalizedPath = currentPath === "" ? "/" : currentPath;
|
||||||
|
const page = routes[normalizedPath] ?? null;
|
||||||
|
|
||||||
|
root.innerHTML = `
|
||||||
|
${siteHeader(page?.title ?? "Explorer")}
|
||||||
|
<main class="page">${(page ?? notFoundPageConfig).render()}</main>
|
||||||
|
${siteFooter()}
|
||||||
|
`;
|
||||||
|
|
||||||
|
initDataModeToggle(render);
|
||||||
|
void page?.init?.();
|
||||||
|
}
|
||||||
|
|
||||||
|
const notFoundPageConfig: PageConfig = {
|
||||||
|
title: "Not Found",
|
||||||
|
render: () => `
|
||||||
|
<section class="not-found">
|
||||||
|
<h2>Page Not Found</h2>
|
||||||
|
<p>The requested view is not available yet.</p>
|
||||||
|
</section>
|
||||||
|
`,
|
||||||
|
};
|
||||||
|
|
||||||
|
document.addEventListener("DOMContentLoaded", render);
|
||||||
72
apps/explorer-web/src/pages/addresses.js
Normal file
72
apps/explorer-web/src/pages/addresses.js
Normal file
@ -0,0 +1,72 @@
|
|||||||
|
"use strict";
|
||||||
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||||
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||||
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||||||
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||||
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||||
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||||
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||||
|
});
|
||||||
|
};
|
||||||
|
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||||
|
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
|
||||||
|
return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||||
|
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||||
|
function step(op) {
|
||||||
|
if (f) throw new TypeError("Generator is already executing.");
|
||||||
|
while (g && (g = 0, op[0] && (_ = 0)), _) try {
|
||||||
|
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||||
|
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||||
|
switch (op[0]) {
|
||||||
|
case 0: case 1: t = op; break;
|
||||||
|
case 4: _.label++; return { value: op[1], done: false };
|
||||||
|
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||||
|
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||||
|
default:
|
||||||
|
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||||
|
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||||
|
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||||
|
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||||
|
if (t[2]) _.ops.pop();
|
||||||
|
_.trys.pop(); continue;
|
||||||
|
}
|
||||||
|
op = body.call(thisArg, _);
|
||||||
|
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||||
|
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||||
|
}
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.addressesTitle = void 0;
|
||||||
|
exports.renderAddressesPage = renderAddressesPage;
|
||||||
|
exports.initAddressesPage = initAddressesPage;
|
||||||
|
var mockData_1 = require("../lib/mockData");
|
||||||
|
exports.addressesTitle = "Addresses";
|
||||||
|
function renderAddressesPage() {
|
||||||
|
return "\n <section class=\"addresses\">\n <header class=\"section-header\">\n <h2>Address Lookup</h2>\n <p class=\"lead\">Enter an account address to view recent transactions, balances, and receipt history (mock results shown below).</p>\n </header>\n <form class=\"addresses__search\" aria-label=\"Search for an address\">\n <label class=\"addresses__label\" for=\"address-input\">Address</label>\n <div class=\"addresses__input-group\">\n <input id=\"address-input\" name=\"address\" type=\"search\" placeholder=\"0x...\" disabled />\n <button type=\"submit\" disabled>Search</button>\n </div>\n <p class=\"placeholder\">Searching will be enabled after integrating the coordinator/blockchain node endpoints.</p>\n </form>\n <section class=\"addresses__details\">\n <h3>Recent Activity</h3>\n <table class=\"table addresses__table\">\n <thead>\n <tr>\n <th scope=\"col\">Address</th>\n <th scope=\"col\">Balance</th>\n <th scope=\"col\">Tx Count</th>\n <th scope=\"col\">Last Active</th>\n </tr>\n </thead>\n <tbody id=\"addresses-table-body\">\n <tr>\n <td class=\"placeholder\" colspan=\"4\">Loading addresses\u2026</td>\n </tr>\n </tbody>\n </table>\n </section>\n </section>\n ";
|
||||||
|
}
|
||||||
|
function initAddressesPage() {
|
||||||
|
return __awaiter(this, void 0, void 0, function () {
|
||||||
|
var tbody, addresses;
|
||||||
|
return __generator(this, function (_a) {
|
||||||
|
switch (_a.label) {
|
||||||
|
case 0:
|
||||||
|
tbody = document.querySelector("#addresses-table-body");
|
||||||
|
if (!tbody) {
|
||||||
|
return [2 /*return*/];
|
||||||
|
}
|
||||||
|
return [4 /*yield*/, (0, mockData_1.fetchAddresses)()];
|
||||||
|
case 1:
|
||||||
|
addresses = _a.sent();
|
||||||
|
if (addresses.length === 0) {
|
||||||
|
tbody.innerHTML = "\n <tr>\n <td class=\"placeholder\" colspan=\"4\">No mock addresses available.</td>\n </tr>\n ";
|
||||||
|
return [2 /*return*/];
|
||||||
|
}
|
||||||
|
tbody.innerHTML = addresses.map(renderAddressRow).join("");
|
||||||
|
return [2 /*return*/];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
function renderAddressRow(address) {
|
||||||
|
return "\n <tr>\n <td><code>".concat(address.address, "</code></td>\n <td>").concat(address.balance, "</td>\n <td>").concat(address.txCount, "</td>\n <td>").concat(new Date(address.lastActive).toLocaleString(), "</td>\n </tr>\n ");
|
||||||
|
}
|
||||||
72
apps/explorer-web/src/pages/addresses.ts
Normal file
72
apps/explorer-web/src/pages/addresses.ts
Normal file
@ -0,0 +1,72 @@
|
|||||||
|
import { fetchAddresses, type AddressSummary } from "../lib/mockData";
|
||||||
|
|
||||||
|
export const addressesTitle = "Addresses";
|
||||||
|
|
||||||
|
export function renderAddressesPage(): string {
|
||||||
|
return `
|
||||||
|
<section class="addresses">
|
||||||
|
<header class="section-header">
|
||||||
|
<h2>Address Lookup</h2>
|
||||||
|
<p class="lead">Enter an account address to view recent transactions, balances, and receipt history (mock results shown below).</p>
|
||||||
|
</header>
|
||||||
|
<form class="addresses__search" aria-label="Search for an address">
|
||||||
|
<label class="addresses__label" for="address-input">Address</label>
|
||||||
|
<div class="addresses__input-group">
|
||||||
|
<input id="address-input" name="address" type="search" placeholder="0x..." disabled />
|
||||||
|
<button type="submit" disabled>Search</button>
|
||||||
|
</div>
|
||||||
|
<p class="placeholder">Searching will be enabled after integrating the coordinator/blockchain node endpoints.</p>
|
||||||
|
</form>
|
||||||
|
<section class="addresses__details">
|
||||||
|
<h3>Recent Activity</h3>
|
||||||
|
<table class="table addresses__table">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th scope="col">Address</th>
|
||||||
|
<th scope="col">Balance</th>
|
||||||
|
<th scope="col">Tx Count</th>
|
||||||
|
<th scope="col">Last Active</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody id="addresses-table-body">
|
||||||
|
<tr>
|
||||||
|
<td class="placeholder" colspan="4">Loading addresses…</td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</section>
|
||||||
|
</section>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function initAddressesPage(): Promise<void> {
|
||||||
|
const tbody = document.querySelector<HTMLTableSectionElement>(
|
||||||
|
"#addresses-table-body",
|
||||||
|
);
|
||||||
|
if (!tbody) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const addresses = await fetchAddresses();
|
||||||
|
if (addresses.length === 0) {
|
||||||
|
tbody.innerHTML = `
|
||||||
|
<tr>
|
||||||
|
<td class="placeholder" colspan="4">No mock addresses available.</td>
|
||||||
|
</tr>
|
||||||
|
`;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
tbody.innerHTML = addresses.map(renderAddressRow).join("");
|
||||||
|
}
|
||||||
|
|
||||||
|
function renderAddressRow(address: AddressSummary): string {
|
||||||
|
return `
|
||||||
|
<tr>
|
||||||
|
<td><code>${address.address}</code></td>
|
||||||
|
<td>${address.balance}</td>
|
||||||
|
<td>${address.txCount}</td>
|
||||||
|
<td>${new Date(address.lastActive).toLocaleString()}</td>
|
||||||
|
</tr>
|
||||||
|
`;
|
||||||
|
}
|
||||||
74
apps/explorer-web/src/pages/blocks.js
Normal file
74
apps/explorer-web/src/pages/blocks.js
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
"use strict";
|
||||||
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||||
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||||
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||||||
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||||
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||||
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||||
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||||
|
});
|
||||||
|
};
|
||||||
|
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||||
|
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
|
||||||
|
return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||||
|
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||||
|
function step(op) {
|
||||||
|
if (f) throw new TypeError("Generator is already executing.");
|
||||||
|
while (g && (g = 0, op[0] && (_ = 0)), _) try {
|
||||||
|
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||||
|
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||||
|
switch (op[0]) {
|
||||||
|
case 0: case 1: t = op; break;
|
||||||
|
case 4: _.label++; return { value: op[1], done: false };
|
||||||
|
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||||
|
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||||
|
default:
|
||||||
|
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||||
|
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||||
|
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||||
|
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||||
|
if (t[2]) _.ops.pop();
|
||||||
|
_.trys.pop(); continue;
|
||||||
|
}
|
||||||
|
op = body.call(thisArg, _);
|
||||||
|
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||||
|
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||||
|
}
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.blocksTitle = void 0;
|
||||||
|
exports.renderBlocksPage = renderBlocksPage;
|
||||||
|
exports.initBlocksPage = initBlocksPage;
|
||||||
|
var mockData_1 = require("../lib/mockData");
|
||||||
|
exports.blocksTitle = "Blocks";
|
||||||
|
function renderBlocksPage() {
|
||||||
|
return "\n <section class=\"blocks\">\n <header class=\"section-header\">\n <h2>Recent Blocks</h2>\n <p class=\"lead\">This view lists blocks pulled from the coordinator or blockchain node (mock data shown for now).</p>\n </header>\n <table class=\"table blocks__table\">\n <thead>\n <tr>\n <th scope=\"col\">Height</th>\n <th scope=\"col\">Block Hash</th>\n <th scope=\"col\">Timestamp</th>\n <th scope=\"col\">Tx Count</th>\n <th scope=\"col\">Proposer</th>\n </tr>\n </thead>\n <tbody id=\"blocks-table-body\">\n <tr>\n <td class=\"placeholder\" colspan=\"5\">Loading blocks\u2026</td>\n </tr>\n </tbody>\n </table>\n </section>\n ";
|
||||||
|
}
|
||||||
|
function initBlocksPage() {
|
||||||
|
return __awaiter(this, void 0, void 0, function () {
|
||||||
|
var tbody, blocks;
|
||||||
|
return __generator(this, function (_a) {
|
||||||
|
switch (_a.label) {
|
||||||
|
case 0:
|
||||||
|
tbody = document.querySelector("#blocks-table-body");
|
||||||
|
if (!tbody) {
|
||||||
|
return [2 /*return*/];
|
||||||
|
}
|
||||||
|
return [4 /*yield*/, (0, mockData_1.fetchBlocks)()];
|
||||||
|
case 1:
|
||||||
|
blocks = _a.sent();
|
||||||
|
if (blocks.length === 0) {
|
||||||
|
tbody.innerHTML = "\n <tr>\n <td class=\"placeholder\" colspan=\"5\">No mock blocks available.</td>\n </tr>\n ";
|
||||||
|
return [2 /*return*/];
|
||||||
|
}
|
||||||
|
tbody.innerHTML = blocks
|
||||||
|
.map(function (block) { return renderBlockRow(block); })
|
||||||
|
.join("");
|
||||||
|
return [2 /*return*/];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
function renderBlockRow(block) {
|
||||||
|
return "\n <tr>\n <td>".concat(block.height, "</td>\n <td><code>").concat(block.hash.slice(0, 18), "\u2026</code></td>\n <td>").concat(new Date(block.timestamp).toLocaleString(), "</td>\n <td>").concat(block.txCount, "</td>\n <td>").concat(block.proposer, "</td>\n </tr>\n ");
|
||||||
|
}
|
||||||
65
apps/explorer-web/src/pages/blocks.ts
Normal file
65
apps/explorer-web/src/pages/blocks.ts
Normal file
@ -0,0 +1,65 @@
|
|||||||
|
import { fetchBlocks, type BlockSummary } from "../lib/mockData";
|
||||||
|
|
||||||
|
export const blocksTitle = "Blocks";
|
||||||
|
|
||||||
|
export function renderBlocksPage(): string {
|
||||||
|
return `
|
||||||
|
<section class="blocks">
|
||||||
|
<header class="section-header">
|
||||||
|
<h2>Recent Blocks</h2>
|
||||||
|
<p class="lead">This view lists blocks pulled from the coordinator or blockchain node (mock data shown for now).</p>
|
||||||
|
</header>
|
||||||
|
<table class="table blocks__table">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th scope="col">Height</th>
|
||||||
|
<th scope="col">Block Hash</th>
|
||||||
|
<th scope="col">Timestamp</th>
|
||||||
|
<th scope="col">Tx Count</th>
|
||||||
|
<th scope="col">Proposer</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody id="blocks-table-body">
|
||||||
|
<tr>
|
||||||
|
<td class="placeholder" colspan="5">Loading blocks…</td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</section>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function initBlocksPage(): Promise<void> {
|
||||||
|
const tbody = document.querySelector<HTMLTableSectionElement>(
|
||||||
|
"#blocks-table-body",
|
||||||
|
);
|
||||||
|
if (!tbody) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const blocks = await fetchBlocks();
|
||||||
|
if (blocks.length === 0) {
|
||||||
|
tbody.innerHTML = `
|
||||||
|
<tr>
|
||||||
|
<td class="placeholder" colspan="5">No mock blocks available.</td>
|
||||||
|
</tr>
|
||||||
|
`;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
tbody.innerHTML = blocks
|
||||||
|
.map((block) => renderBlockRow(block))
|
||||||
|
.join("");
|
||||||
|
}
|
||||||
|
|
||||||
|
function renderBlockRow(block: BlockSummary): string {
|
||||||
|
return `
|
||||||
|
<tr>
|
||||||
|
<td>${block.height}</td>
|
||||||
|
<td><code>${block.hash.slice(0, 18)}…</code></td>
|
||||||
|
<td>${new Date(block.timestamp).toLocaleString()}</td>
|
||||||
|
<td>${block.txCount}</td>
|
||||||
|
<td>${block.proposer}</td>
|
||||||
|
</tr>
|
||||||
|
`;
|
||||||
|
}
|
||||||
93
apps/explorer-web/src/pages/overview.js
Normal file
93
apps/explorer-web/src/pages/overview.js
Normal file
@ -0,0 +1,93 @@
|
|||||||
|
"use strict";
|
||||||
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||||
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||||
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||||||
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||||
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||||
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||||
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||||
|
});
|
||||||
|
};
|
||||||
|
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||||
|
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
|
||||||
|
return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||||
|
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||||
|
function step(op) {
|
||||||
|
if (f) throw new TypeError("Generator is already executing.");
|
||||||
|
while (g && (g = 0, op[0] && (_ = 0)), _) try {
|
||||||
|
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||||
|
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||||
|
switch (op[0]) {
|
||||||
|
case 0: case 1: t = op; break;
|
||||||
|
case 4: _.label++; return { value: op[1], done: false };
|
||||||
|
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||||
|
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||||
|
default:
|
||||||
|
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||||
|
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||||
|
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||||
|
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||||
|
if (t[2]) _.ops.pop();
|
||||||
|
_.trys.pop(); continue;
|
||||||
|
}
|
||||||
|
op = body.call(thisArg, _);
|
||||||
|
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||||
|
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||||
|
}
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.overviewTitle = void 0;
|
||||||
|
exports.renderOverviewPage = renderOverviewPage;
|
||||||
|
exports.initOverviewPage = initOverviewPage;
|
||||||
|
var mockData_1 = require("../lib/mockData");
|
||||||
|
exports.overviewTitle = "Network Overview";
|
||||||
|
function renderOverviewPage() {
|
||||||
|
return "\n <section class=\"overview\">\n <p class=\"lead\">High-level summaries of recent blocks, transactions, and receipts will appear here.</p>\n <div class=\"overview__grid\">\n <article class=\"card\">\n <h3>Latest Block</h3>\n <ul class=\"stat-list\" id=\"overview-block-stats\">\n <li class=\"placeholder\">Loading block data\u2026</li>\n </ul>\n </article>\n <article class=\"card\">\n <h3>Recent Transactions</h3>\n <ul class=\"stat-list\" id=\"overview-transaction-stats\">\n <li class=\"placeholder\">Loading transaction data\u2026</li>\n </ul>\n </article>\n <article class=\"card\">\n <h3>Receipt Metrics</h3>\n <ul class=\"stat-list\" id=\"overview-receipt-stats\">\n <li class=\"placeholder\">Loading receipt data\u2026</li>\n </ul>\n </article>\n </div>\n </section>\n ";
|
||||||
|
}
|
||||||
|
function initOverviewPage() {
|
||||||
|
return __awaiter(this, void 0, void 0, function () {
|
||||||
|
var _a, blocks, transactions, receipts, blockStats, latest, txStats, succeeded, receiptStats, attested;
|
||||||
|
return __generator(this, function (_b) {
|
||||||
|
switch (_b.label) {
|
||||||
|
case 0: return [4 /*yield*/, Promise.all([
|
||||||
|
(0, mockData_1.fetchBlocks)(),
|
||||||
|
(0, mockData_1.fetchTransactions)(),
|
||||||
|
(0, mockData_1.fetchReceipts)(),
|
||||||
|
])];
|
||||||
|
case 1:
|
||||||
|
_a = _b.sent(), blocks = _a[0], transactions = _a[1], receipts = _a[2];
|
||||||
|
blockStats = document.querySelector("#overview-block-stats");
|
||||||
|
if (blockStats) {
|
||||||
|
if (blocks.length > 0) {
|
||||||
|
latest = blocks[0];
|
||||||
|
blockStats.innerHTML = "\n <li><strong>Height:</strong> ".concat(latest.height, "</li>\n <li><strong>Hash:</strong> ").concat(latest.hash.slice(0, 18), "\u2026</li>\n <li><strong>Proposer:</strong> ").concat(latest.proposer, "</li>\n <li><strong>Time:</strong> ").concat(new Date(latest.timestamp).toLocaleString(), "</li>\n ");
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
blockStats.innerHTML = "<li class=\"placeholder\">No mock block data available.</li>";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
txStats = document.querySelector("#overview-transaction-stats");
|
||||||
|
if (txStats) {
|
||||||
|
if (transactions.length > 0) {
|
||||||
|
succeeded = transactions.filter(function (tx) { return tx.status === "Succeeded"; });
|
||||||
|
txStats.innerHTML = "\n <li><strong>Total Mock Tx:</strong> ".concat(transactions.length, "</li>\n <li><strong>Succeeded:</strong> ").concat(succeeded.length, "</li>\n <li><strong>Pending:</strong> ").concat(transactions.length - succeeded.length, "</li>\n ");
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
txStats.innerHTML = "<li class=\"placeholder\">No mock transaction data available.</li>";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
receiptStats = document.querySelector("#overview-receipt-stats");
|
||||||
|
if (receiptStats) {
|
||||||
|
if (receipts.length > 0) {
|
||||||
|
attested = receipts.filter(function (receipt) { return receipt.status === "Attested"; });
|
||||||
|
receiptStats.innerHTML = "\n <li><strong>Total Receipts:</strong> ".concat(receipts.length, "</li>\n <li><strong>Attested:</strong> ").concat(attested.length, "</li>\n <li><strong>Pending:</strong> ").concat(receipts.length - attested.length, "</li>\n ");
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
receiptStats.innerHTML = "<li class=\"placeholder\">No mock receipt data available.</li>";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return [2 /*return*/];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
92
apps/explorer-web/src/pages/overview.ts
Normal file
92
apps/explorer-web/src/pages/overview.ts
Normal file
@ -0,0 +1,92 @@
|
|||||||
|
import {
|
||||||
|
fetchBlocks,
|
||||||
|
fetchTransactions,
|
||||||
|
fetchReceipts,
|
||||||
|
} from "../lib/mockData";
|
||||||
|
|
||||||
|
export const overviewTitle = "Network Overview";
|
||||||
|
|
||||||
|
/**
 * Build the static HTML for the network overview page.
 *
 * Returns a `<section>` with three stat cards (latest block, recent
 * transactions, receipt metrics). Each card starts with a "Loading…"
 * placeholder `<li>`; `initOverviewPage` later replaces the list contents,
 * locating them by id (#overview-block-stats, #overview-transaction-stats,
 * #overview-receipt-stats).
 */
export function renderOverviewPage(): string {
  // NOTE: indentation inside this template literal is part of the returned
  // string; it only affects readability of the generated markup.
  return `
    <section class="overview">
      <p class="lead">High-level summaries of recent blocks, transactions, and receipts will appear here.</p>
      <div class="overview__grid">
        <article class="card">
          <h3>Latest Block</h3>
          <ul class="stat-list" id="overview-block-stats">
            <li class="placeholder">Loading block data…</li>
          </ul>
        </article>
        <article class="card">
          <h3>Recent Transactions</h3>
          <ul class="stat-list" id="overview-transaction-stats">
            <li class="placeholder">Loading transaction data…</li>
          </ul>
        </article>
        <article class="card">
          <h3>Receipt Metrics</h3>
          <ul class="stat-list" id="overview-receipt-stats">
            <li class="placeholder">Loading receipt data…</li>
          </ul>
        </article>
      </div>
    </section>
  `;
}
|
||||||
|
|
||||||
|
/**
 * Populate the overview cards rendered by `renderOverviewPage` with mock data.
 *
 * Fetches blocks, transactions, and receipts concurrently, then rewrites the
 * three stat lists (looked up by id). Each section is null-guarded, so the
 * function is a no-op for any card missing from the DOM; call it only after
 * the page markup has been attached.
 *
 * NOTE(review): values are interpolated into innerHTML unescaped — fine for
 * trusted mock data, but escaping is needed before wiring to live APIs.
 */
export async function initOverviewPage(): Promise<void> {
  // Run all three mock fetches in parallel.
  const [blocks, transactions, receipts] = await Promise.all([
    fetchBlocks(),
    fetchTransactions(),
    fetchReceipts(),
  ]);

  const blockStats = document.querySelector<HTMLUListElement>(
    "#overview-block-stats",
  );
  if (blockStats) {
    if (blocks.length > 0) {
      // Assumes the mock block list is ordered newest-first — TODO confirm
      // against the mockData fixtures.
      const latest = blocks[0];
      blockStats.innerHTML = `
        <li><strong>Height:</strong> ${latest.height}</li>
        <li><strong>Hash:</strong> ${latest.hash.slice(0, 18)}…</li>
        <li><strong>Proposer:</strong> ${latest.proposer}</li>
        <li><strong>Time:</strong> ${new Date(latest.timestamp).toLocaleString()}</li>
      `;
    } else {
      blockStats.innerHTML = `<li class="placeholder">No mock block data available.</li>`;
    }
  }

  const txStats = document.querySelector<HTMLUListElement>(
    "#overview-transaction-stats",
  );
  if (txStats) {
    if (transactions.length > 0) {
      const succeeded = transactions.filter((tx) => tx.status === "Succeeded");
      // NOTE(review): "Pending" below counts every non-Succeeded tx,
      // including failures — confirm that is the intended label.
      txStats.innerHTML = `
        <li><strong>Total Mock Tx:</strong> ${transactions.length}</li>
        <li><strong>Succeeded:</strong> ${succeeded.length}</li>
        <li><strong>Pending:</strong> ${transactions.length - succeeded.length}</li>
      `;
    } else {
      txStats.innerHTML = `<li class="placeholder">No mock transaction data available.</li>`;
    }
  }

  const receiptStats = document.querySelector<HTMLUListElement>(
    "#overview-receipt-stats",
  );
  if (receiptStats) {
    if (receipts.length > 0) {
      const attested = receipts.filter((receipt) => receipt.status === "Attested");
      receiptStats.innerHTML = `
        <li><strong>Total Receipts:</strong> ${receipts.length}</li>
        <li><strong>Attested:</strong> ${attested.length}</li>
        <li><strong>Pending:</strong> ${receipts.length - attested.length}</li>
      `;
    } else {
      receiptStats.innerHTML = `<li class="placeholder">No mock receipt data available.</li>`;
    }
  }
}
|
||||||
72
apps/explorer-web/src/pages/receipts.js
Normal file
72
apps/explorer-web/src/pages/receipts.js
Normal file
@ -0,0 +1,72 @@
|
|||||||
|
"use strict";
|
||||||
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||||
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||||
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||||||
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||||
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||||
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||||
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||||
|
});
|
||||||
|
};
|
||||||
|
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||||
|
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
|
||||||
|
return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||||
|
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||||
|
function step(op) {
|
||||||
|
if (f) throw new TypeError("Generator is already executing.");
|
||||||
|
while (g && (g = 0, op[0] && (_ = 0)), _) try {
|
||||||
|
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||||
|
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||||
|
switch (op[0]) {
|
||||||
|
case 0: case 1: t = op; break;
|
||||||
|
case 4: _.label++; return { value: op[1], done: false };
|
||||||
|
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||||
|
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||||
|
default:
|
||||||
|
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||||
|
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||||
|
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||||
|
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||||
|
if (t[2]) _.ops.pop();
|
||||||
|
_.trys.pop(); continue;
|
||||||
|
}
|
||||||
|
op = body.call(thisArg, _);
|
||||||
|
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||||
|
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||||
|
}
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.receiptsTitle = void 0;
|
||||||
|
exports.renderReceiptsPage = renderReceiptsPage;
|
||||||
|
exports.initReceiptsPage = initReceiptsPage;
|
||||||
|
var mockData_1 = require("../lib/mockData");
|
||||||
|
exports.receiptsTitle = "Receipts";
|
||||||
|
function renderReceiptsPage() {
|
||||||
|
return "\n <section class=\"receipts\">\n <header class=\"section-header\">\n <h2>Receipt History</h2>\n <p class=\"lead\">Mock receipts from the coordinator history are displayed below; live lookup will arrive with API wiring.</p>\n </header>\n <div class=\"receipts__controls\">\n <label class=\"receipts__label\" for=\"job-id-input\">Job ID</label>\n <div class=\"receipts__input-group\">\n <input id=\"job-id-input\" name=\"jobId\" type=\"search\" placeholder=\"Enter job ID\" disabled />\n <button type=\"button\" disabled>Lookup</button>\n </div>\n <p class=\"placeholder\">Receipt lookup will be enabled after wiring to <code>/v1/jobs/{job_id}/receipts</code>.</p>\n </div>\n <section class=\"receipts__list\">\n <h3>Recent Receipts</h3>\n <table class=\"table receipts__table\">\n <thead>\n <tr>\n <th scope=\"col\">Job ID</th>\n <th scope=\"col\">Receipt ID</th>\n <th scope=\"col\">Miner</th>\n <th scope=\"col\">Coordinator</th>\n <th scope=\"col\">Issued</th>\n <th scope=\"col\">Status</th>\n </tr>\n </thead>\n <tbody id=\"receipts-table-body\">\n <tr>\n <td class=\"placeholder\" colspan=\"6\">Loading receipts\u2026</td>\n </tr>\n </tbody>\n </table>\n </section>\n </section>\n ";
|
||||||
|
}
|
||||||
|
function initReceiptsPage() {
|
||||||
|
return __awaiter(this, void 0, void 0, function () {
|
||||||
|
var tbody, receipts;
|
||||||
|
return __generator(this, function (_a) {
|
||||||
|
switch (_a.label) {
|
||||||
|
case 0:
|
||||||
|
tbody = document.querySelector("#receipts-table-body");
|
||||||
|
if (!tbody) {
|
||||||
|
return [2 /*return*/];
|
||||||
|
}
|
||||||
|
return [4 /*yield*/, (0, mockData_1.fetchReceipts)()];
|
||||||
|
case 1:
|
||||||
|
receipts = _a.sent();
|
||||||
|
if (receipts.length === 0) {
|
||||||
|
tbody.innerHTML = "\n <tr>\n <td class=\"placeholder\" colspan=\"6\">No mock receipts available.</td>\n </tr>\n ";
|
||||||
|
return [2 /*return*/];
|
||||||
|
}
|
||||||
|
tbody.innerHTML = receipts.map(renderReceiptRow).join("");
|
||||||
|
return [2 /*return*/];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
// Render one receipts-table <tr>. This appears to be the tsc-compiled
// counterpart of renderReceiptRow in src/pages/receipts.ts — edit the
// TypeScript source rather than this generated file.
function renderReceiptRow(receipt) {
    return "\n <tr>\n <td><code>".concat(receipt.jobId, "</code></td>\n <td><code>").concat(receipt.receiptId, "</code></td>\n <td>").concat(receipt.miner, "</td>\n <td>").concat(receipt.coordinator, "</td>\n <td>").concat(new Date(receipt.issuedAt).toLocaleString(), "</td>\n <td>").concat(receipt.status, "</td>\n </tr>\n ");
}
|
||||||
76
apps/explorer-web/src/pages/receipts.ts
Normal file
76
apps/explorer-web/src/pages/receipts.ts
Normal file
@ -0,0 +1,76 @@
|
|||||||
|
import { fetchReceipts, type ReceiptSummary } from "../lib/mockData";
|
||||||
|
|
||||||
|
export const receiptsTitle = "Receipts";
|
||||||
|
|
||||||
|
/**
 * Build the static HTML for the receipts page.
 *
 * Contains a disabled job-ID lookup form (to be enabled once the
 * `/v1/jobs/{job_id}/receipts` endpoint is wired) and a six-column receipts
 * table whose body (#receipts-table-body) starts with a loading placeholder
 * row; `initReceiptsPage` fills it afterwards.
 */
export function renderReceiptsPage(): string {
  return `
    <section class="receipts">
      <header class="section-header">
        <h2>Receipt History</h2>
        <p class="lead">Mock receipts from the coordinator history are displayed below; live lookup will arrive with API wiring.</p>
      </header>
      <div class="receipts__controls">
        <label class="receipts__label" for="job-id-input">Job ID</label>
        <div class="receipts__input-group">
          <input id="job-id-input" name="jobId" type="search" placeholder="Enter job ID" disabled />
          <button type="button" disabled>Lookup</button>
        </div>
        <p class="placeholder">Receipt lookup will be enabled after wiring to <code>/v1/jobs/{job_id}/receipts</code>.</p>
      </div>
      <section class="receipts__list">
        <h3>Recent Receipts</h3>
        <table class="table receipts__table">
          <thead>
            <tr>
              <th scope="col">Job ID</th>
              <th scope="col">Receipt ID</th>
              <th scope="col">Miner</th>
              <th scope="col">Coordinator</th>
              <th scope="col">Issued</th>
              <th scope="col">Status</th>
            </tr>
          </thead>
          <tbody id="receipts-table-body">
            <tr>
              <td class="placeholder" colspan="6">Loading receipts…</td>
            </tr>
          </tbody>
        </table>
      </section>
    </section>
  `;
}
|
||||||
|
|
||||||
|
/**
 * Fill the receipts table rendered by `renderReceiptsPage` with mock rows.
 *
 * No-op when the table body is absent from the DOM; shows a single
 * placeholder row when the mock data set is empty.
 */
export async function initReceiptsPage(): Promise<void> {
  const tbody = document.querySelector<HTMLTableSectionElement>(
    "#receipts-table-body",
  );
  if (!tbody) {
    // Page not mounted (or markup changed) — nothing to populate.
    return;
  }

  const receipts = await fetchReceipts();
  if (receipts.length === 0) {
    tbody.innerHTML = `
      <tr>
        <td class="placeholder" colspan="6">No mock receipts available.</td>
      </tr>
    `;
    return;
  }

  // One <tr> per receipt; renderReceiptRow defines the column order.
  tbody.innerHTML = receipts.map(renderReceiptRow).join("");
}
|
||||||
|
|
||||||
|
/**
 * Render one `<tr>` for the receipts table.
 *
 * Column order matches the header in `renderReceiptsPage`: job ID,
 * receipt ID, miner, coordinator, issue time (localized), status.
 *
 * NOTE(review): fields are interpolated unescaped — acceptable for mock
 * data, but escape before rendering live API responses.
 */
function renderReceiptRow(receipt: ReceiptSummary): string {
  return `
    <tr>
      <td><code>${receipt.jobId}</code></td>
      <td><code>${receipt.receiptId}</code></td>
      <td>${receipt.miner}</td>
      <td>${receipt.coordinator}</td>
      <td>${new Date(receipt.issuedAt).toLocaleString()}</td>
      <td>${receipt.status}</td>
    </tr>
  `;
}
|
||||||
72
apps/explorer-web/src/pages/transactions.js
Normal file
72
apps/explorer-web/src/pages/transactions.js
Normal file
@ -0,0 +1,72 @@
|
|||||||
|
"use strict";
|
||||||
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||||
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||||
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||||||
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||||
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||||
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||||
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||||
|
});
|
||||||
|
};
|
||||||
|
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||||
|
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype);
|
||||||
|
return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||||
|
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||||
|
function step(op) {
|
||||||
|
if (f) throw new TypeError("Generator is already executing.");
|
||||||
|
while (g && (g = 0, op[0] && (_ = 0)), _) try {
|
||||||
|
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||||
|
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||||
|
switch (op[0]) {
|
||||||
|
case 0: case 1: t = op; break;
|
||||||
|
case 4: _.label++; return { value: op[1], done: false };
|
||||||
|
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||||
|
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||||
|
default:
|
||||||
|
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||||
|
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||||
|
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||||
|
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||||
|
if (t[2]) _.ops.pop();
|
||||||
|
_.trys.pop(); continue;
|
||||||
|
}
|
||||||
|
op = body.call(thisArg, _);
|
||||||
|
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||||
|
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||||
|
}
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.transactionsTitle = void 0;
|
||||||
|
exports.renderTransactionsPage = renderTransactionsPage;
|
||||||
|
exports.initTransactionsPage = initTransactionsPage;
|
||||||
|
var mockData_1 = require("../lib/mockData");
|
||||||
|
exports.transactionsTitle = "Transactions";
|
||||||
|
function renderTransactionsPage() {
|
||||||
|
return "\n <section class=\"transactions\">\n <header class=\"section-header\">\n <h2>Recent Transactions</h2>\n <p class=\"lead\">Mock data is shown below until coordinator or node APIs are wired up.</p>\n </header>\n <table class=\"table transactions__table\">\n <thead>\n <tr>\n <th scope=\"col\">Hash</th>\n <th scope=\"col\">Block</th>\n <th scope=\"col\">From</th>\n <th scope=\"col\">To</th>\n <th scope=\"col\">Value</th>\n <th scope=\"col\">Status</th>\n </tr>\n </thead>\n <tbody id=\"transactions-table-body\">\n <tr>\n <td class=\"placeholder\" colspan=\"6\">Loading transactions\u2026</td>\n </tr>\n </tbody>\n </table>\n </section>\n ";
|
||||||
|
}
|
||||||
|
function initTransactionsPage() {
|
||||||
|
return __awaiter(this, void 0, void 0, function () {
|
||||||
|
var tbody, transactions;
|
||||||
|
return __generator(this, function (_a) {
|
||||||
|
switch (_a.label) {
|
||||||
|
case 0:
|
||||||
|
tbody = document.querySelector("#transactions-table-body");
|
||||||
|
if (!tbody) {
|
||||||
|
return [2 /*return*/];
|
||||||
|
}
|
||||||
|
return [4 /*yield*/, (0, mockData_1.fetchTransactions)()];
|
||||||
|
case 1:
|
||||||
|
transactions = _a.sent();
|
||||||
|
if (transactions.length === 0) {
|
||||||
|
tbody.innerHTML = "\n <tr>\n <td class=\"placeholder\" colspan=\"6\">No mock transactions available.</td>\n </tr>\n ";
|
||||||
|
return [2 /*return*/];
|
||||||
|
}
|
||||||
|
tbody.innerHTML = transactions.map(renderTransactionRow).join("");
|
||||||
|
return [2 /*return*/];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
// Render one transactions-table <tr>. This appears to be the tsc-compiled
// counterpart of renderTransactionRow in src/pages/transactions.ts — edit
// the TypeScript source rather than this generated file.
function renderTransactionRow(tx) {
    return "\n <tr>\n <td><code>".concat(tx.hash.slice(0, 18), "\u2026</code></td>\n <td>").concat(tx.block, "</td>\n <td><code>").concat(tx.from.slice(0, 12), "\u2026</code></td>\n <td><code>").concat(tx.to.slice(0, 12), "\u2026</code></td>\n <td>").concat(tx.value, "</td>\n <td>").concat(tx.status, "</td>\n </tr>\n ");
}
|
||||||
68
apps/explorer-web/src/pages/transactions.ts
Normal file
68
apps/explorer-web/src/pages/transactions.ts
Normal file
@ -0,0 +1,68 @@
|
|||||||
|
import {
|
||||||
|
fetchTransactions,
|
||||||
|
type TransactionSummary,
|
||||||
|
} from "../lib/mockData";
|
||||||
|
|
||||||
|
export const transactionsTitle = "Transactions";
|
||||||
|
|
||||||
|
/**
 * Build the static HTML for the transactions page.
 *
 * Returns a six-column table whose body (#transactions-table-body) starts
 * with a loading placeholder row; `initTransactionsPage` fills it with mock
 * data afterwards.
 */
export function renderTransactionsPage(): string {
  return `
    <section class="transactions">
      <header class="section-header">
        <h2>Recent Transactions</h2>
        <p class="lead">Mock data is shown below until coordinator or node APIs are wired up.</p>
      </header>
      <table class="table transactions__table">
        <thead>
          <tr>
            <th scope="col">Hash</th>
            <th scope="col">Block</th>
            <th scope="col">From</th>
            <th scope="col">To</th>
            <th scope="col">Value</th>
            <th scope="col">Status</th>
          </tr>
        </thead>
        <tbody id="transactions-table-body">
          <tr>
            <td class="placeholder" colspan="6">Loading transactions…</td>
          </tr>
        </tbody>
      </table>
    </section>
  `;
}
|
||||||
|
|
||||||
|
/**
 * Fill the transactions table rendered by `renderTransactionsPage` with
 * mock rows.
 *
 * No-op when the table body is absent from the DOM; shows a single
 * placeholder row when the mock data set is empty.
 */
export async function initTransactionsPage(): Promise<void> {
  const tbody = document.querySelector<HTMLTableSectionElement>(
    "#transactions-table-body",
  );
  if (!tbody) {
    // Page not mounted (or markup changed) — nothing to populate.
    return;
  }

  const transactions = await fetchTransactions();
  if (transactions.length === 0) {
    tbody.innerHTML = `
      <tr>
        <td class="placeholder" colspan="6">No mock transactions available.</td>
      </tr>
    `;
    return;
  }

  // One <tr> per transaction; renderTransactionRow defines the column order.
  tbody.innerHTML = transactions.map(renderTransactionRow).join("");
}
|
||||||
|
|
||||||
|
/**
 * Render one `<tr>` for the transactions table.
 *
 * Hash and addresses are truncated with an ellipsis for display (18 and 12
 * chars respectively). Column order matches the header in
 * `renderTransactionsPage`: hash, block, from, to, value, status.
 *
 * NOTE(review): fields are interpolated unescaped — acceptable for mock
 * data, but escape before rendering live API responses.
 */
function renderTransactionRow(tx: TransactionSummary): string {
  return `
    <tr>
      <td><code>${tx.hash.slice(0, 18)}…</code></td>
      <td>${tx.block}</td>
      <td><code>${tx.from.slice(0, 12)}…</code></td>
      <td><code>${tx.to.slice(0, 12)}…</code></td>
      <td>${tx.value}</td>
      <td>${tx.status}</td>
    </tr>
  `;
}
|
||||||
14
apps/explorer-web/tsconfig.json
Normal file
14
apps/explorer-web/tsconfig.json
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"target": "ESNext",
|
||||||
|
"module": "ESNext",
|
||||||
|
"moduleResolution": "Node",
|
||||||
|
"strict": true,
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"forceConsistentCasingInFileNames": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"lib": ["ESNext", "DOM"],
|
||||||
|
"types": ["vite/client"]
|
||||||
|
},
|
||||||
|
"include": ["src"]
|
||||||
|
}
|
||||||
7
apps/explorer-web/vite.config.ts
Normal file
7
apps/explorer-web/vite.config.ts
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
import { defineConfig } from "vite";
|
||||||
|
|
||||||
|
// Vite configuration for the explorer web app.
// NOTE(review): 4173 is Vite's default *preview* port — presumably chosen so
// dev and preview share one URL; confirm this is intentional.
export default defineConfig({
  server: {
    port: 4173,
  },
});
|
||||||
15
apps/marketplace-web/README.md
Normal file
15
apps/marketplace-web/README.md
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
# Marketplace Web
|
||||||
|
|
||||||
|
## Purpose & Scope
|
||||||
|
|
||||||
|
Vite-powered vanilla TypeScript app for listing compute offers, placing bids, and showing market analytics. Follow the implementation blueprint in `docs/bootstrap/marketplace_web.md`.
|
||||||
|
|
||||||
|
## Development Setup
|
||||||
|
|
||||||
|
- Install dependencies with `npm install` once `package.json` is defined.
|
||||||
|
- Run the dev server via `npm run dev`.
|
||||||
|
- Build for production with `npm run build` and preview using `npm run preview`.
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
Works against mock API responses initially; switch to real coordinator/pool-hub endpoints later via `VITE_API_BASE`.
|
||||||
27
apps/miner-node/README.md
Normal file
27
apps/miner-node/README.md
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
# Miner Node
|
||||||
|
|
||||||
|
## Purpose & Scope
|
||||||
|
|
||||||
|
Worker daemon responsible for executing compute jobs on CPU/GPU hardware, reporting telemetry, and submitting proofs back to the coordinator. See `docs/bootstrap/miner_node.md` for the detailed implementation roadmap.
|
||||||
|
|
||||||
|
## Development Setup
|
||||||
|
|
||||||
|
- Create a Python virtual environment under `apps/miner-node/.venv`.
|
||||||
|
- Install dependencies (FastAPI optional for health endpoint, `httpx`, `pydantic`, `psutil`).
|
||||||
|
- Implement the package structure described in the bootstrap guide.
|
||||||
|
|
||||||
|
## Production Deployment (systemd)
|
||||||
|
|
||||||
|
1. Copy the project to `/opt/aitbc/apps/miner-node/` on the target host.
|
||||||
|
2. Create a virtual environment and install dependencies as needed.
|
||||||
|
3. Populate `.env` with coordinator URL/API token settings.
|
||||||
|
4. Run the installer script from repo root:
|
||||||
|
```bash
|
||||||
|
sudo scripts/ops/install_miner_systemd.sh
|
||||||
|
```
|
||||||
|
This installs `configs/systemd/aitbc-miner.service`, reloads systemd, and enables the service.
|
||||||
|
5. Check status/logs:
|
||||||
|
```bash
|
||||||
|
sudo systemctl status aitbc-miner
|
||||||
|
journalctl -u aitbc-miner -f
|
||||||
|
```
|
||||||
30
apps/miner-node/pyproject.toml
Normal file
30
apps/miner-node/pyproject.toml
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
[tool.poetry]
|
||||||
|
name = "aitbc-miner-node"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "AITBC miner node daemon"
|
||||||
|
authors = ["AITBC Team"]
|
||||||
|
packages = [
|
||||||
|
{ include = "aitbc_miner", from = "src" }
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.poetry.dependencies]
|
||||||
|
python = "^3.11"
|
||||||
|
httpx = "^0.27.0"
|
||||||
|
pydantic = "^2.7.0"
|
||||||
|
pyyaml = "^6.0.1"
|
||||||
|
psutil = "^5.9.8"
|
||||||
|
aiosignal = "^1.3.1"
|
||||||
|
uvloop = { version = "^0.19.0", optional = true }
|
||||||
|
# NOTE: the PyPI "asyncio" package must not be a dependency — asyncio ships with
# the standard library, and the third-party backport (last released for Python
# 3.3/3.4) breaks on modern interpreters. It was also never referenced by any
# extra despite being marked optional.
|
||||||
|
rich = "^13.7.1"
|
||||||
|
|
||||||
|
[tool.poetry.extras]
|
||||||
|
uvloop = ["uvloop"]
|
||||||
|
|
||||||
|
[tool.poetry.group.dev.dependencies]
|
||||||
|
pytest = "^8.2.0"
|
||||||
|
pytest-asyncio = "^0.23.0"
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["poetry-core>=1.0.0"]
|
||||||
|
build-backend = "poetry.core.masonry.api"
|
||||||
1
apps/miner-node/src/aitbc_miner/__init__.py
Normal file
1
apps/miner-node/src/aitbc_miner/__init__.py
Normal file
@ -0,0 +1 @@
|
|||||||
|
"""AITBC miner node package."""
|
||||||
1
apps/miner-node/src/aitbc_miner/agent/__init__.py
Normal file
1
apps/miner-node/src/aitbc_miner/agent/__init__.py
Normal file
@ -0,0 +1 @@
|
|||||||
|
"""Control loop and background tasks for the miner node."""
|
||||||
127
apps/miner-node/src/aitbc_miner/agent/control.py
Normal file
127
apps/miner-node/src/aitbc_miner/agent/control.py
Normal file
@ -0,0 +1,127 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import json
|
||||||
|
from collections.abc import Callable
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from ..config import settings
|
||||||
|
from ..logging import get_logger
|
||||||
|
from ..coordinator import CoordinatorClient
|
||||||
|
from ..util.probe import collect_capabilities, collect_runtime_metrics
|
||||||
|
from ..util.backoff import compute_backoff
|
||||||
|
from ..util.fs import ensure_workspace, write_json
|
||||||
|
from ..runners import get_runner
|
||||||
|
|
||||||
|
logger = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class MinerControlLoop:
    """Background control loop for a miner node.

    Once started, owns two long-running asyncio tasks:

    * a heartbeat loop that reports liveness and runtime metrics to the
      coordinator at a fixed interval, and
    * a poll loop that asks the coordinator for work, executes any job it
      receives via the matching runner, and submits the result or failure.

    Call :meth:`start` to register and launch the tasks, :meth:`stop` to
    cancel them and close the coordinator client.
    """

    def __init__(self) -> None:
        self._tasks: list[asyncio.Task[None]] = []
        self._stop_event = asyncio.Event()
        self._coordinator = CoordinatorClient()
        # Hardware/concurrency capabilities are probed once at construction
        # and reused for registration.
        self._capabilities_snapshot = collect_capabilities(settings.max_concurrent_cpu, settings.max_concurrent_gpu)
        # Mirrors the poll loop's reset-on-job behaviour; kept for
        # backward compatibility (the loop itself tracks its interval in a
        # local variable).
        self._current_backoff = settings.poll_interval_seconds

    async def start(self) -> None:
        """Register with the coordinator and launch the background tasks.

        Raises whatever :meth:`_register` raises; no tasks are started when
        registration fails.
        """
        logger.info("Starting miner control loop", extra={"node_id": settings.node_id})
        await self._register()
        self._tasks.append(asyncio.create_task(self._heartbeat_loop()))
        self._tasks.append(asyncio.create_task(self._poll_loop()))

    async def stop(self) -> None:
        """Cancel background tasks and close the coordinator client."""
        logger.info("Stopping miner control loop")
        self._stop_event.set()
        for task in self._tasks:
            task.cancel()
        # return_exceptions=True absorbs the CancelledError raised by each
        # cancelled task so shutdown never raises here.
        await asyncio.gather(*self._tasks, return_exceptions=True)
        await self._coordinator.aclose()

    async def _register(self) -> None:
        """Announce this miner's capabilities and region to the coordinator.

        Re-raises any failure: the caller must not proceed with an
        unregistered miner.
        """
        payload = {
            "capabilities": self._capabilities_snapshot.capabilities,
            "concurrency": self._capabilities_snapshot.concurrency,
            "region": settings.region,
        }
        try:
            resp = await self._coordinator.register(payload)
            logger.info("Registered miner", extra={"resp": resp})
        except Exception as exc:
            logger.exception("Failed to register miner", exc_info=exc)
            raise

    async def _heartbeat_loop(self) -> None:
        """Send a liveness heartbeat every ``heartbeat_interval_seconds``.

        A failed heartbeat is logged and the loop continues; it is not
        treated as fatal.
        """
        interval = settings.heartbeat_interval_seconds
        while not self._stop_event.is_set():
            payload = {
                "inflight": 0,
                "status": "ONLINE",
                "metadata": collect_runtime_metrics(),
            }
            try:
                await self._coordinator.heartbeat(payload)
                logger.debug("heartbeat sent")
            except Exception as exc:
                logger.warning("heartbeat failed", exc_info=exc)
            await asyncio.sleep(interval)

    def _next_poll_interval(self, interval: float) -> float:
        """Return the backed-off poll interval, capped at ``max_backoff_seconds``."""
        backed_off = compute_backoff(interval, 2.0, settings.heartbeat_jitter_pct, settings.max_backoff_seconds)
        return min(backed_off, settings.max_backoff_seconds)

    async def _poll_loop(self) -> None:
        """Poll the coordinator for jobs, backing off while idle.

        The interval grows exponentially (with jitter) on empty polls and
        errors, up to ``max_backoff_seconds``, and resets to the configured
        base as soon as a job is received.
        """
        interval = settings.poll_interval_seconds
        while not self._stop_event.is_set():
            payload = {"max_wait_seconds": interval}
            try:
                job = await self._coordinator.poll(payload)
                if job:
                    logger.info("received job", extra={"job_id": job.get("job_id")})
                    # Bug fix: previously only the (otherwise unused)
                    # _current_backoff attribute was reset here, so the local
                    # interval stayed backed off even once work arrived.
                    interval = settings.poll_interval_seconds
                    self._current_backoff = settings.poll_interval_seconds
                    await self._handle_job(job)
                else:
                    interval = self._next_poll_interval(interval)
                    logger.debug("no job; next poll interval=%s", interval)
            except Exception as exc:
                logger.warning("poll failed", exc_info=exc)
                interval = self._next_poll_interval(interval)
            await asyncio.sleep(interval)

    async def _handle_job(self, job: dict) -> None:
        """Execute one job and report the outcome to the coordinator.

        A crashing runner is reported as a ``RUNTIME_ERROR`` failure; a
        runner that completes reports either a result or a structured
        failure depending on ``result.ok``.
        """
        job_id = job.get("job_id", "unknown")
        workspace = ensure_workspace(settings.workspace_root, job_id)
        runner_kind = job.get("runner", {}).get("kind", "noop")
        runner = get_runner(runner_kind)

        try:
            result = await runner.run(job, workspace)
        except Exception as exc:
            logger.exception("runner crashed", extra={"job_id": job_id, "runner": runner_kind})
            await self._coordinator.submit_failure(
                job_id,
                {
                    "error_code": "RUNTIME_ERROR",
                    "error_message": str(exc),
                    "metrics": {},
                },
            )
            return

        if result.ok:
            # Persist the raw output in the job workspace for debugging.
            write_json(workspace / "result.json", result.output)
            try:
                await self._coordinator.submit_result(
                    job_id,
                    {
                        "result": result.output,
                        "metrics": {"workspace": str(workspace)},
                    },
                )
            except Exception as exc:
                # Best-effort submission: the result file is already on disk.
                logger.warning("failed to submit result", extra={"job_id": job_id}, exc_info=exc)
        else:
            await self._coordinator.submit_failure(
                job_id,
                {
                    "error_code": result.output.get("error_code", "FAILED"),
                    "error_message": result.output.get("error_message", "Job failed"),
                    "metrics": result.output.get("metrics", {}),
                },
            )
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user