Fix sync to preserve transaction type field when creating ChainTransaction
Some checks failed
Blockchain Synchronization Verification / sync-verification (push) Successful in 3s
Integration Tests / test-service-integration (push) Failing after 9s
Multi-Node Blockchain Health Monitoring / health-check (push) Successful in 2s
P2P Network Verification / p2p-verification (push) Successful in 2s
Python Tests / test-python (push) Successful in 13s
Security Scanning / security-scan (push) Successful in 22s
This commit is contained in:
@@ -38,8 +38,31 @@ from .env import (
|
||||
get_float_env_var,
|
||||
get_list_env_var,
|
||||
)
|
||||
from .paths import (
|
||||
get_data_path,
|
||||
get_config_path,
|
||||
get_log_path,
|
||||
get_repo_path,
|
||||
ensure_dir,
|
||||
ensure_file_dir,
|
||||
resolve_path,
|
||||
get_keystore_path,
|
||||
get_blockchain_data_path,
|
||||
get_marketplace_data_path,
|
||||
)
|
||||
from .json_utils import (
|
||||
load_json,
|
||||
save_json,
|
||||
merge_json,
|
||||
json_to_string,
|
||||
string_to_json,
|
||||
get_nested_value,
|
||||
set_nested_value,
|
||||
flatten_json,
|
||||
)
|
||||
from .http_client import AITBCHTTPClient
|
||||
|
||||
__version__ = "0.3.0"
|
||||
__version__ = "0.4.0"
|
||||
__all__ = [
|
||||
# Logging
|
||||
"get_logger",
|
||||
@@ -75,4 +98,26 @@ __all__ = [
|
||||
"get_int_env_var",
|
||||
"get_float_env_var",
|
||||
"get_list_env_var",
|
||||
# Path utilities
|
||||
"get_data_path",
|
||||
"get_config_path",
|
||||
"get_log_path",
|
||||
"get_repo_path",
|
||||
"ensure_dir",
|
||||
"ensure_file_dir",
|
||||
"resolve_path",
|
||||
"get_keystore_path",
|
||||
"get_blockchain_data_path",
|
||||
"get_marketplace_data_path",
|
||||
# JSON utilities
|
||||
"load_json",
|
||||
"save_json",
|
||||
"merge_json",
|
||||
"json_to_string",
|
||||
"string_to_json",
|
||||
"get_nested_value",
|
||||
"set_nested_value",
|
||||
"flatten_json",
|
||||
# HTTP client
|
||||
"AITBCHTTPClient",
|
||||
]
|
||||
|
||||
207
aitbc/http_client.py
Normal file
207
aitbc/http_client.py
Normal file
@@ -0,0 +1,207 @@
|
||||
"""
|
||||
AITBC HTTP Client
|
||||
Base HTTP client with common utilities for AITBC applications
|
||||
"""
|
||||
|
||||
import requests
|
||||
from typing import Dict, Any, Optional, Union
|
||||
from .exceptions import NetworkError
|
||||
|
||||
|
||||
class AITBCHTTPClient:
    """
    Base HTTP client for AITBC applications.

    Provides GET/POST/PUT/DELETE helpers on top of a persistent
    ``requests.Session``. All verbs share one request pipeline
    (``_request``) for URL building, header merging, timeout handling and
    error translation, so every transport failure surfaces as a
    ``NetworkError``. Can be used as a context manager to guarantee the
    underlying session is closed.
    """

    def __init__(
        self,
        base_url: str = "",
        timeout: int = 30,
        headers: Optional[Dict[str, str]] = None
    ):
        """
        Initialize HTTP client.

        Args:
            base_url: Base URL for all requests (a trailing slash is stripped)
            timeout: Request timeout in seconds
            headers: Default headers applied to every request
        """
        self.base_url = base_url.rstrip("/")
        self.timeout = timeout
        self.headers = headers or {}
        self.session = requests.Session()
        self.session.headers.update(self.headers)

    def _build_url(self, endpoint: str) -> str:
        """
        Build full URL from base URL and endpoint.

        Absolute URLs (http:// or https://) are passed through untouched.

        Args:
            endpoint: API endpoint or absolute URL

        Returns:
            Full URL
        """
        if endpoint.startswith("http://") or endpoint.startswith("https://"):
            return endpoint
        return f"{self.base_url}/{endpoint.lstrip('/')}"

    def _request(
        self,
        method: str,
        endpoint: str,
        headers: Optional[Dict[str, str]] = None,
        empty_ok: bool = False,
        **kwargs: Any
    ) -> Dict[str, Any]:
        """
        Shared implementation for all HTTP verbs.

        Args:
            method: Upper-case HTTP verb name ("GET", "POST", ...)
            endpoint: API endpoint
            headers: Additional per-call headers (override client defaults)
            empty_ok: When True, an empty response body yields {} instead of
                attempting JSON decoding (used by DELETE, which commonly
                returns 204 No Content)
            **kwargs: Extra keyword arguments forwarded to requests
                (params / data / json)

        Returns:
            Decoded JSON response body as a dictionary

        Raises:
            NetworkError: If the request fails, returns an error status, or
                the body cannot be decoded as JSON
        """
        url = self._build_url(endpoint)
        # Per-call headers take precedence over the client-wide defaults.
        req_headers = {**self.headers, **(headers or {})}

        try:
            response = self.session.request(
                method,
                url,
                headers=req_headers,
                timeout=self.timeout,
                **kwargs
            )
            response.raise_for_status()
            if empty_ok and not response.content:
                return {}
            return response.json()
        except (requests.RequestException, ValueError) as e:
            # ValueError covers invalid-JSON bodies on requests < 2.27,
            # where response.json() does not raise a RequestException.
            raise NetworkError(f"{method} request failed: {e}")

    def get(
        self,
        endpoint: str,
        params: Optional[Dict[str, Any]] = None,
        headers: Optional[Dict[str, str]] = None
    ) -> Dict[str, Any]:
        """
        Perform GET request.

        Args:
            endpoint: API endpoint
            params: Query parameters
            headers: Additional headers

        Returns:
            Response data as dictionary

        Raises:
            NetworkError: If request fails
        """
        return self._request("GET", endpoint, headers=headers, params=params)

    def post(
        self,
        endpoint: str,
        data: Optional[Dict[str, Any]] = None,
        json: Optional[Dict[str, Any]] = None,
        headers: Optional[Dict[str, str]] = None
    ) -> Dict[str, Any]:
        """
        Perform POST request.

        Args:
            endpoint: API endpoint
            data: Form data
            json: JSON data
            headers: Additional headers

        Returns:
            Response data as dictionary

        Raises:
            NetworkError: If request fails
        """
        return self._request("POST", endpoint, headers=headers, data=data, json=json)

    def put(
        self,
        endpoint: str,
        data: Optional[Dict[str, Any]] = None,
        json: Optional[Dict[str, Any]] = None,
        headers: Optional[Dict[str, str]] = None
    ) -> Dict[str, Any]:
        """
        Perform PUT request.

        Args:
            endpoint: API endpoint
            data: Form data
            json: JSON data
            headers: Additional headers

        Returns:
            Response data as dictionary

        Raises:
            NetworkError: If request fails
        """
        return self._request("PUT", endpoint, headers=headers, data=data, json=json)

    def delete(
        self,
        endpoint: str,
        params: Optional[Dict[str, Any]] = None,
        headers: Optional[Dict[str, str]] = None
    ) -> Dict[str, Any]:
        """
        Perform DELETE request.

        Args:
            endpoint: API endpoint
            params: Query parameters
            headers: Additional headers

        Returns:
            Response data as dictionary ({} when the server sends no body)

        Raises:
            NetworkError: If request fails
        """
        return self._request(
            "DELETE", endpoint, headers=headers, empty_ok=True, params=params
        )

    def close(self) -> None:
        """Close the HTTP session."""
        self.session.close()

    def __enter__(self):
        """Context manager entry."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit: always release the session."""
        self.close()
|
||||
157
aitbc/json_utils.py
Normal file
157
aitbc/json_utils.py
Normal file
@@ -0,0 +1,157 @@
|
||||
"""
|
||||
AITBC JSON Utilities
|
||||
Centralized JSON loading, saving, and manipulation
|
||||
"""
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any, List, Optional
|
||||
from .exceptions import ConfigurationError
|
||||
|
||||
|
||||
def load_json(path: Path) -> Dict[str, Any]:
    """
    Read and parse a JSON document from disk.

    Args:
        path: Location of the JSON file

    Returns:
        Decoded JSON content as a dictionary

    Raises:
        ConfigurationError: If the file is missing or contains invalid JSON
    """
    try:
        with path.open('r') as handle:
            return json.load(handle)
    except FileNotFoundError:
        raise ConfigurationError(f"JSON file not found: {path}")
    except json.JSONDecodeError as exc:
        raise ConfigurationError(f"Invalid JSON in {path}: {exc}")
|
||||
|
||||
|
||||
def save_json(data: Dict[str, Any], path: Path, indent: int = 2) -> None:
    """
    Serialize *data* as JSON and write it to *path*.

    The parent directory is created first if it does not already exist.

    Args:
        data: Dictionary to serialize
        path: Destination file path
        indent: Number of spaces used for indentation
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    with path.open('w') as handle:
        json.dump(data, handle, indent=indent)
|
||||
|
||||
|
||||
def merge_json(*paths: Path) -> Dict[str, Any]:
    """
    Shallow-merge several JSON files into one dictionary.

    Files are applied left to right, so top-level keys from later files
    override the same keys from earlier ones (values are not deep-merged).

    Args:
        *paths: JSON file paths, in increasing order of precedence

    Returns:
        Merged dictionary
    """
    combined: Dict[str, Any] = {}
    for file_path in paths:
        combined.update(load_json(file_path))
    return combined
|
||||
|
||||
|
||||
def json_to_string(data: Dict[str, Any], indent: int = 2) -> str:
    """
    Serialize a dictionary to a pretty-printed JSON string.

    Args:
        data: Dictionary to serialize
        indent: Number of spaces used for indentation

    Returns:
        JSON-encoded string
    """
    return json.dumps(data, indent=indent)
|
||||
|
||||
|
||||
def string_to_json(json_str: str) -> Dict[str, Any]:
    """
    Parse a JSON document held in a string.

    Args:
        json_str: Text containing a JSON document

    Returns:
        Decoded dictionary

    Raises:
        ConfigurationError: If the text is not valid JSON
    """
    try:
        return json.loads(json_str)
    except json.JSONDecodeError as exc:
        raise ConfigurationError(f"Invalid JSON string: {exc}")
|
||||
|
||||
|
||||
def get_nested_value(data: Dict[str, Any], *keys: str, default: Any = None) -> Any:
    """
    Walk a chain of keys into a nested dictionary.

    Args:
        data: Dictionary to search
        *keys: Keys to follow, outermost first
               (e.g. "a", "b", "c" for data["a"]["b"]["c"])
        default: Value returned when any key along the chain is missing

    Returns:
        The value at the end of the key chain, or *default*
    """
    node: Any = data
    for k in keys:
        # Bail out as soon as the chain cannot be followed any further.
        if not isinstance(node, dict) or k not in node:
            return default
        node = node[k]
    return node
|
||||
|
||||
|
||||
def set_nested_value(data: Dict[str, Any], *keys: str, value: Any) -> None:
    """
    Set a value deep inside a nested dictionary, creating levels as needed.

    Intermediate keys that are missing -- or whose current value is not a
    dictionary -- are (re)initialized to empty dicts so the chain can
    always be completed.

    Args:
        data: Dictionary to modify in place
        *keys: Keys to traverse (e.g. "a", "b", "c" for data["a"]["b"]["c"]);
               at least one key is required
        value: Value to store at the final key

    Raises:
        ValueError: If no keys are given
    """
    if not keys:
        # The original implementation raised a bare IndexError here.
        raise ValueError("set_nested_value requires at least one key")
    current = data
    for key in keys[:-1]:
        # Overwrite non-dict intermediates; previously a scalar in the
        # middle of the chain crashed with a TypeError.
        if not isinstance(current.get(key), dict):
            current[key] = {}
        current = current[key]
    current[keys[-1]] = value
|
||||
|
||||
|
||||
def flatten_json(data: Dict[str, Any], separator: str = ".") -> Dict[str, Any]:
    """
    Flatten a nested dictionary into a single level using joined keys.

    Note: nested empty dicts contribute no entries to the result.

    Args:
        data: Nested dictionary
        separator: String placed between joined key segments

    Returns:
        Flat dictionary whose keys are the joined key paths of the input
    """
    def _walk(node: Any, prefix: str = "") -> Dict[str, Any]:
        # Leaf (non-dict) values terminate the recursion.
        if not isinstance(node, dict):
            return {prefix: node}
        out: Dict[str, Any] = {}
        for key, val in node.items():
            joined = f"{prefix}{separator}{key}" if prefix else key
            out.update(_walk(val, joined))
        return out

    return _walk(data)
|
||||
153
aitbc/paths.py
Normal file
153
aitbc/paths.py
Normal file
@@ -0,0 +1,153 @@
|
||||
"""
|
||||
AITBC Path Utilities
|
||||
Centralized path resolution and directory management
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from .constants import DATA_DIR, CONFIG_DIR, LOG_DIR, REPO_DIR
|
||||
from .exceptions import ConfigurationError
|
||||
|
||||
|
||||
def get_data_path(subpath: str = "") -> Path:
    """
    Resolve a location inside the AITBC data directory.

    Args:
        subpath: Optional path relative to the data directory

    Returns:
        The data directory itself when *subpath* is empty,
        otherwise the joined path
    """
    return DATA_DIR / subpath if subpath else DATA_DIR
|
||||
|
||||
|
||||
def get_config_path(filename: str) -> Path:
    """
    Resolve a configuration file inside the AITBC configuration directory.

    Args:
        filename: Name of the configuration file

    Returns:
        Full path to the configuration file
    """
    return CONFIG_DIR / filename
|
||||
|
||||
|
||||
def get_log_path(filename: str) -> Path:
    """
    Resolve a log file inside the AITBC log directory.

    Args:
        filename: Name of the log file

    Returns:
        Full path to the log file
    """
    return LOG_DIR / filename
|
||||
|
||||
|
||||
def get_repo_path(subpath: str = "") -> Path:
    """
    Resolve a location inside the AITBC repository.

    Args:
        subpath: Optional path relative to the repository root

    Returns:
        The repository root itself when *subpath* is empty,
        otherwise the joined path
    """
    return REPO_DIR / subpath if subpath else REPO_DIR
|
||||
|
||||
|
||||
def ensure_dir(path: Path) -> Path:
    """
    Create *path* (and any missing parents) if needed, then return it.

    Idempotent: an already-existing directory is left untouched.

    Args:
        path: Directory to guarantee

    Returns:
        The same path, now guaranteed to exist
    """
    path.mkdir(parents=True, exist_ok=True)
    return path
|
||||
|
||||
|
||||
def ensure_file_dir(filepath: Path) -> Path:
    """
    Guarantee that the directory containing *filepath* exists.

    Args:
        filepath: File whose parent directory should exist

    Returns:
        The parent directory path, created if it was missing
    """
    parent = filepath.parent
    parent.mkdir(parents=True, exist_ok=True)
    return parent
|
||||
|
||||
|
||||
def resolve_path(path: str, base: Path = REPO_DIR) -> Path:
    """
    Resolve *path* against *base* unless it is already absolute.

    Args:
        path: Path to resolve (absolute or relative)
        base: Directory that relative paths are anchored to

    Returns:
        The path itself when absolute, otherwise base joined with it
    """
    candidate = Path(path)
    return candidate if candidate.is_absolute() else base / candidate
|
||||
|
||||
|
||||
def get_keystore_path(wallet_name: str = "") -> Path:
    """
    Locate the keystore directory, or a specific wallet file within it.

    Args:
        wallet_name: Optional wallet name; when given, the path of that
            wallet's JSON keystore file is returned

    Returns:
        The keystore directory, or "<keystore>/<wallet_name>.json"
    """
    base = DATA_DIR / "keystore"
    return base / f"{wallet_name}.json" if wallet_name else base
|
||||
|
||||
|
||||
def get_blockchain_data_path(chain_id: str = "ait-mainnet") -> Path:
    """
    Locate the on-disk data directory for a given blockchain.

    Args:
        chain_id: Chain identifier (defaults to the mainnet chain)

    Returns:
        Full path to that chain's blockchain data directory
    """
    return DATA_DIR / "data" / chain_id
|
||||
|
||||
|
||||
def get_marketplace_data_path(subpath: str = "") -> Path:
    """
    Resolve a location inside the marketplace data directory.

    Args:
        subpath: Optional path relative to the marketplace directory

    Returns:
        The marketplace directory itself when *subpath* is empty,
        otherwise the joined path
    """
    base = DATA_DIR / "marketplace"
    return base / subpath if subpath else base
|
||||
@@ -355,6 +355,13 @@ class ChainSync:
|
||||
logger.warning(f"[SYNC] Failed to apply transaction {tx_hash}: {error_msg}")
|
||||
# For now, log warning but continue (to be enforced in production)
|
||||
|
||||
# Extract type from transaction data
|
||||
tx_type = tx_data.get("type", "TRANSFER")
|
||||
if tx_type:
|
||||
tx_type = tx_type.upper()
|
||||
else:
|
||||
tx_type = "TRANSFER"
|
||||
|
||||
tx = ChainTransaction(
|
||||
chain_id=self._chain_id,
|
||||
tx_hash=tx_hash,
|
||||
@@ -362,6 +369,7 @@ class ChainSync:
|
||||
sender=sender_addr,
|
||||
recipient=recipient_addr,
|
||||
payload=tx_data,
|
||||
type=tx_type,
|
||||
)
|
||||
session.add(tx)
|
||||
|
||||
|
||||
Reference in New Issue
Block a user