chore(cleanup): remove obsolete scripts and update paths for production deployment
- Remove dev/scripts/check-file-organization.sh (obsolete organization checker) - Remove dev/scripts/community_onboarding.py (unused 559-line automation script) - Update gpu_miner_host.py log path from /home/oib/windsurf/aitbc to /opt/aitbc - Add service status and standardization badges to README.md
This commit is contained in:
126
dev/scripts/development/aitbc-cli.sh
Executable file
126
dev/scripts/development/aitbc-cli.sh
Executable file
@@ -0,0 +1,126 @@
|
||||
#!/usr/bin/env bash
# AITBC CLI wrapper.
#
# Dispatches job/explorer/admin commands either to the Python client
# (cli/client.py) or directly to the coordinator HTTP API via curl.
set -euo pipefail

# Resolve the Python client relative to this script's own location.
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
CLI_PY="$ROOT_DIR/cli/client.py"

# Connection settings. The ':?' expansions abort with the given message when
# the variable is unset or empty, so the key requirements fail fast.
AITBC_URL="${AITBC_URL:-http://localhost:8000}"
CLIENT_KEY="${CLIENT_KEY:?Set CLIENT_KEY env var}"
ADMIN_KEY="${ADMIN_KEY:?Set ADMIN_KEY env var}"
# NOTE(review): MINER_KEY is required here but never used by any command
# below — confirm whether it can be dropped from the mandatory environment.
MINER_KEY="${MINER_KEY:?Set MINER_KEY env var}"

# Print usage to stdout.
usage() {
    cat <<'EOF'
AITBC CLI wrapper

Usage:
  aitbc-cli.sh submit <type> [--prompt TEXT] [--model NAME] [--ttl SECONDS]
  aitbc-cli.sh status <job_id>
  aitbc-cli.sh browser [--block-limit N] [--tx-limit N] [--receipt-limit N] [--job-id ID]
  aitbc-cli.sh blocks [--limit N]
  aitbc-cli.sh receipts [--limit N] [--job-id ID]
  aitbc-cli.sh cancel <job_id>
  aitbc-cli.sh admin-miners
  aitbc-cli.sh admin-jobs
  aitbc-cli.sh admin-stats
  aitbc-cli.sh admin-cancel-running
  aitbc-cli.sh health

Environment overrides:
  AITBC_URL (default: http://localhost:8000)
  CLIENT_KEY (required)
  ADMIN_KEY (required)
  MINER_KEY (required)
EOF
}

# Run the Python client with the standard URL/client-key arguments,
# forwarding the subcommand and its options verbatim.
client_py() {
    python3 "$CLI_PY" --url "$AITBC_URL" --api-key "$CLIENT_KEY" "$@"
}

if [[ $# -lt 1 ]]; then
    usage
    exit 1
fi

cmd="$1"
shift

case "$cmd" in
    # These four subcommands are pure pass-throughs to the Python client.
    submit|status|browser|blocks)
        client_py "$cmd" "$@"
        ;;
    receipts)
        # Query the explorer receipts endpoint directly; supports an optional
        # job filter in addition to the result limit.
        limit=10
        job_id=""
        while [[ $# -gt 0 ]]; do
            case "$1" in
                --limit)
                    limit="$2"
                    shift 2
                    ;;
                --job-id)
                    job_id="$2"
                    shift 2
                    ;;
                *)
                    echo "Unknown option: $1" >&2
                    exit 1
                    ;;
            esac
        done
        if [[ -n "$job_id" ]]; then
            curl -sS "$AITBC_URL/v1/explorer/receipts?limit=${limit}&job_id=${job_id}"
        else
            curl -sS "$AITBC_URL/v1/explorer/receipts?limit=${limit}"
        fi
        ;;
    cancel)
        if [[ $# -lt 1 ]]; then
            echo "Usage: aitbc-cli.sh cancel <job_id>" >&2
            exit 1
        fi
        job_id="$1"
        curl -sS -X POST -H "X-Api-Key: ${CLIENT_KEY}" "$AITBC_URL/v1/jobs/${job_id}/cancel"
        ;;
    admin-miners)
        curl -sS -H "X-Api-Key: ${ADMIN_KEY}" "$AITBC_URL/v1/admin/miners"
        ;;
    admin-jobs)
        curl -sS -H "X-Api-Key: ${ADMIN_KEY}" "$AITBC_URL/v1/admin/jobs"
        ;;
    admin-stats)
        curl -sS -H "X-Api-Key: ${ADMIN_KEY}" "$AITBC_URL/v1/admin/stats"
        ;;
    admin-cancel-running)
        # jq is needed to extract job ids from the admin listing; fail with a
        # clear message instead of a cryptic "command not found" mid-pipeline.
        if ! command -v jq >/dev/null 2>&1; then
            echo "admin-cancel-running requires jq to be installed" >&2
            exit 1
        fi
        echo "Fetching running jobs..."
        running_jobs=$(curl -sS -H "X-Api-Key: ${ADMIN_KEY}" "$AITBC_URL/v1/admin/jobs" | jq -r '.[] | select(.state == "running") | .id')
        if [[ -z "$running_jobs" ]]; then
            echo "No running jobs found."
        else
            count=0
            for job_id in $running_jobs; do
                echo "Cancelling job: $job_id"
                # NOTE(review): cancellation uses CLIENT_KEY even though this
                # is an admin command — confirm the API accepts this for jobs
                # submitted by other clients.
                curl -sS -X POST -H "X-Api-Key: ${CLIENT_KEY}" "$AITBC_URL/v1/jobs/${job_id}/cancel" > /dev/null
                # BUGFIX: '((count++))' evaluates to 0 on the first iteration,
                # which is a failing exit status under 'set -e' and aborted
                # the script after cancelling a single job. Plain arithmetic
                # assignment always succeeds.
                count=$((count + 1))
            done
            echo "Cancelled $count running jobs."
        fi
        ;;
    health)
        curl -sS "$AITBC_URL/v1/health"
        ;;
    help|-h|--help)
        usage
        ;;
    *)
        echo "Unknown command: $cmd" >&2
        usage
        exit 1
        ;;
esac
||||
17
dev/scripts/development/aitbc-pythonpath.pth
Normal file
17
dev/scripts/development/aitbc-pythonpath.pth
Normal file
@@ -0,0 +1,17 @@
|
||||
# Add project paths to Python path for imports
#
# NOTE(review): despite the .pth extension, this is multi-line Python. Real
# .pth files processed by the `site` module only exec lines that begin with
# "import " (every other non-comment line is treated as a literal path entry),
# and `__file__` is not defined while a .pth line is exec'd. As written, this
# file can only work if it is executed as a regular Python script (e.g. via
# sitecustomize or exec()), not as an installed .pth file — confirm how it is
# actually consumed.
import sys
from pathlib import Path

# Get the directory where this .pth file is located
# NOTE(review): `parent` is the directory containing this file. The
# "packages"/"apps" paths below therefore assume the file sits at the project
# root; in-repo it lives under dev/scripts/development/ — verify placement.
project_root = Path(__file__).parent

# Add package source directories
sys.path.insert(0, str(project_root / "packages" / "py" / "aitbc-core" / "src"))
sys.path.insert(0, str(project_root / "packages" / "py" / "aitbc-crypto" / "src"))
sys.path.insert(0, str(project_root / "packages" / "py" / "aitbc-p2p" / "src"))
sys.path.insert(0, str(project_root / "packages" / "py" / "aitbc-sdk" / "src"))

# Add app source directories
sys.path.insert(0, str(project_root / "apps" / "coordinator-api" / "src"))
sys.path.insert(0, str(project_root / "apps" / "wallet-daemon" / "src"))
sys.path.insert(0, str(project_root / "apps" / "blockchain-node" / "src"))
|
||||
559
dev/scripts/development/community_onboarding.py
Normal file
559
dev/scripts/development/community_onboarding.py
Normal file
@@ -0,0 +1,559 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
AITBC Community Onboarding Automation
|
||||
|
||||
This script automates the onboarding process for new community members,
|
||||
including welcome messages, resource links, and initial guidance.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, List, Optional
|
||||
from pathlib import Path
|
||||
import subprocess
|
||||
import os
|
||||
|
||||
|
||||
class CommunityOnboarding:
    """Automated community onboarding system.

    Tracks community members in a JSON file (data/onboarding_data.json),
    sends a welcome message and day-3/7/14 follow-ups, records activity, and
    produces summary reports. The Discord/GitHub/email senders are currently
    stubs that log the message and return True (the production calls are left
    commented out in each sender).
    """

    def __init__(self, config_path: str = "config/community_config.json"):
        # Built-in defaults overlaid with the optional JSON file at config_path.
        self.config = self._load_config(config_path)
        self.logger = self._setup_logging()
        # Persistent state: {"members": {...}, "messages": {...}, "follow_ups": {...}}.
        self.onboarding_data = self._load_onboarding_data()

    def _load_config(self, config_path: str) -> Dict:
        """Load community configuration.

        Returns the built-in defaults, overlaid with the JSON file at
        ``config_path`` when that file exists. Secrets (bot/API tokens, SMTP
        credentials) come from environment variables.
        """
        default_config = {
            "discord": {
                "bot_token": os.getenv("DISCORD_BOT_TOKEN"),
                "welcome_channel": "welcome",
                "general_channel": "general",
                "help_channel": "help"
            },
            "github": {
                "token": os.getenv("GITHUB_TOKEN"),
                "org": "aitbc",
                "repo": "aitbc",
                "team_slugs": ["core-team", "maintainers", "contributors"]
            },
            "email": {
                "smtp_server": os.getenv("SMTP_SERVER"),
                "smtp_port": 587,
                "username": os.getenv("SMTP_USERNAME"),
                "password": os.getenv("SMTP_PASSWORD"),
                "from_address": "community@aitbc.dev"
            },
            "onboarding": {
                "welcome_delay_hours": 1,
                "follow_up_days": [3, 7, 14],
                "resource_links": {
                    "documentation": "https://docs.aitbc.dev",
                    "api_reference": "https://api.aitbc.dev/docs",
                    "plugin_development": "https://docs.aitbc.dev/plugins",
                    "community_forum": "https://community.aitbc.dev",
                    "discord_invite": "https://discord.gg/aitbc"
                }
            }
        }

        config_file = Path(config_path)
        if config_file.exists():
            with open(config_file, 'r') as f:
                user_config = json.load(f)
                # NOTE(review): dict.update is a shallow merge — a user config
                # that defines e.g. "onboarding" replaces the entire nested
                # default section, not individual keys. Confirm this is intended.
                default_config.update(user_config)

        return default_config

    def _setup_logging(self) -> logging.Logger:
        """Setup logging for the onboarding system.

        Returns a named INFO-level logger; the handler is attached only once
        so repeated construction does not duplicate log lines.
        """
        logger = logging.getLogger("community_onboarding")
        logger.setLevel(logging.INFO)

        if not logger.handlers:
            handler = logging.StreamHandler()
            formatter = logging.Formatter(
                '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
            )
            handler.setFormatter(formatter)
            logger.addHandler(handler)

        return logger

    def _load_onboarding_data(self) -> Dict:
        """Load onboarding data from file.

        Falls back to an empty structure when data/onboarding_data.json does
        not exist (first run). Paths are relative to the current working
        directory, so the process must be started from the project root.
        """
        data_file = Path("data/onboarding_data.json")
        if data_file.exists():
            with open(data_file, 'r') as f:
                return json.load(f)
        return {"members": {}, "messages": {}, "follow_ups": {}}

    def _save_onboarding_data(self) -> None:
        """Save onboarding data to file, creating the data/ directory if needed."""
        data_file = Path("data/onboarding_data.json")
        data_file.parent.mkdir(exist_ok=True)
        with open(data_file, 'w') as f:
            json.dump(self.onboarding_data, f, indent=2)

    async def welcome_new_member(self, member_id: str, member_name: str,
                                 platform: str = "discord") -> bool:
        """Welcome a new community member.

        Creates the member record, sends (after the configured delay — see
        _schedule_welcome_message) the welcome message, and records an
        analytics event. Returns True on success, False on any error.
        """
        try:
            self.logger.info(f"Welcoming new member: {member_name} on {platform}")

            # Create onboarding record.
            # NOTE(review): timestamps are naive local time (datetime.now());
            # consider timezone-aware UTC if data is compared across hosts.
            self.onboarding_data["members"][member_id] = {
                "name": member_name,
                "platform": platform,
                "joined_at": datetime.now().isoformat(),
                "welcome_sent": False,
                "follow_ups_sent": [],
                "resources_viewed": [],
                "contributions": [],
                "status": "new"
            }

            # Schedule welcome message.
            # NOTE(review): this await blocks until the welcome delay elapses
            # and the message is sent — see _schedule_welcome_message.
            await self._schedule_welcome_message(member_id)

            # Track member in analytics
            await self._track_member_analytics(member_id, "joined")

            self._save_onboarding_data()
            return True

        except Exception as e:
            self.logger.error(f"Error welcoming member {member_name}: {e}")
            return False

    async def _schedule_welcome_message(self, member_id: str) -> None:
        """Schedule welcome message for new member."""
        delay_hours = self.config["onboarding"]["welcome_delay_hours"]

        # In production, this would use a proper task queue.
        # NOTE(review): contrary to the original comment claiming an immediate
        # send, this sleeps for the full configured delay (delay_hours * 3600
        # seconds — 1 hour by default) inside the caller's await before
        # sending, blocking welcome_new_member for the whole time. Consider
        # asyncio.create_task for fire-and-forget scheduling.
        await asyncio.sleep(delay_hours * 3600)
        await self.send_welcome_message(member_id)

    async def send_welcome_message(self, member_id: str) -> bool:
        """Send welcome message to member.

        Dispatches to the platform-specific sender and, on success, marks the
        member record and persists it. Returns False for unsupported
        platforms, unknown member ids (KeyError is swallowed by the broad
        except), or sender failure.
        """
        try:
            member_data = self.onboarding_data["members"][member_id]
            platform = member_data["platform"]

            if platform == "discord":
                success = await self._send_discord_welcome(member_id)
            elif platform == "github":
                success = await self._send_github_welcome(member_id)
            else:
                self.logger.warning(f"Unsupported platform: {platform}")
                return False

            if success:
                member_data["welcome_sent"] = True
                member_data["welcome_sent_at"] = datetime.now().isoformat()
                self._save_onboarding_data()
                await self._track_member_analytics(member_id, "welcome_sent")

            return success

        except Exception as e:
            self.logger.error(f"Error sending welcome message to {member_id}: {e}")
            return False

    async def _send_discord_welcome(self, member_id: str) -> bool:
        """Send welcome message via Discord (currently a logging stub)."""
        try:
            # Discord bot implementation would go here.
            # For now, we log the generated message and report success.
            member_data = self.onboarding_data["members"][member_id]
            welcome_message = self._generate_welcome_message(member_data["name"])

            self.logger.info(f"Discord welcome message for {member_id}: {welcome_message}")

            # In production:
            # await discord_bot.send_message(
            #     channel_id=self.config["discord"]["welcome_channel"],
            #     content=welcome_message
            # )

            return True

        except Exception as e:
            self.logger.error(f"Error sending Discord welcome: {e}")
            return False

    async def _send_github_welcome(self, member_id: str) -> bool:
        """Send welcome message via GitHub (currently a logging stub)."""
        try:
            # GitHub API implementation would go here.
            member_data = self.onboarding_data["members"][member_id]
            welcome_message = self._generate_welcome_message(member_data["name"])

            self.logger.info(f"GitHub welcome message for {member_id}: {welcome_message}")

            # In production:
            # await github_api.create_issue_comment(
            #     repo=self.config["github"]["repo"],
            #     issue_number=welcome_issue_number,
            #     body=welcome_message
            # )

            return True

        except Exception as e:
            self.logger.error(f"Error sending GitHub welcome: {e}")
            return False

    def _generate_welcome_message(self, member_name: str) -> str:
        """Generate personalized welcome message.

        The triple-quoted body is flush-left because its leading whitespace
        is part of the rendered message text.
        """
        resources = self.config["onboarding"]["resource_links"]

        message = f"""🎉 Welcome to AITBC, {member_name}!

We're excited to have you join our community of developers, researchers, and innovators building the future of AI-powered blockchain technology.

🚀 **Quick Start Guide:**
1. **Documentation**: {resources["documentation"]}
2. **API Reference**: {resources["api_reference"]}
3. **Plugin Development**: {resources["plugin_development"]}
4. **Community Forum**: {resources["community_forum"]}
5. **Discord Chat**: {resources["discord_invite"]}

📋 **Next Steps:**
- ⭐ Star our repository on GitHub
- 📖 Read our contribution guidelines
- 💬 Introduce yourself in the #introductions channel
- 🔍 Check out our "good first issues" for newcomers

🛠️ **Ways to Contribute:**
- Code contributions (bug fixes, features)
- Documentation improvements
- Plugin development
- Community support and mentoring
- Testing and feedback

❓ **Need Help?**
- Ask questions in #help channel
- Check our FAQ at {resources["documentation"]}/faq
- Join our weekly office hours (Tuesdays 2PM UTC)

We're here to help you succeed! Don't hesitate to reach out.

Welcome aboard! 🚀

#AITBCCommunity #Welcome #OpenSource"""

        return message

    async def send_follow_up_message(self, member_id: str, day: int) -> bool:
        """Send follow-up message to member.

        Idempotent per day: returns True without resending when the day-N
        follow-up was already recorded. On success, records the send and an
        analytics event.
        """
        try:
            member_data = self.onboarding_data["members"][member_id]

            if day in member_data["follow_ups_sent"]:
                return True  # Already sent

            follow_up_message = self._generate_follow_up_message(member_data["name"], day)

            # Discord members get a DM; everyone else falls back to email.
            if member_data["platform"] == "discord":
                success = await self._send_discord_follow_up(member_id, follow_up_message)
            else:
                success = await self._send_email_follow_up(member_id, follow_up_message)

            if success:
                member_data["follow_ups_sent"].append(day)
                member_data[f"follow_up_{day}_sent_at"] = datetime.now().isoformat()
                self._save_onboarding_data()
                await self._track_member_analytics(member_id, f"follow_up_{day}")

            return success

        except Exception as e:
            self.logger.error(f"Error sending follow-up to {member_id}: {e}")
            return False

    def _generate_follow_up_message(self, member_name: str, day: int) -> str:
        """Generate follow-up message based on day.

        Days 3, 7 and 14 get tailored messages; any other day gets a generic
        check-in. String bodies are flush-left — leading whitespace is part
        of the message.
        """
        resources = self.config["onboarding"]["resource_links"]

        if day == 3:
            return f"""Hi {member_name}! 👋

Hope you're settling in well! Here are some resources to help you get started:

🔧 **Development Setup:**
- Clone the repository: `git clone https://github.com/aitbc/aitbc`
- Install dependencies: `poetry install`
- Run tests: `pytest`

📚 **Learning Resources:**
- Architecture overview: {resources["documentation"]}/architecture
- Plugin tutorial: {resources["plugin_development"]}/tutorial
- API examples: {resources["api_reference"]}/examples

💬 **Community Engagement:**
- Join our weekly community call (Thursdays 3PM UTC)
- Share your progress in #show-and-tell
- Ask for help in #help

How's your experience been so far? Any questions or challenges we can help with?

#AITBCCommunity #Onboarding #GetStarted"""

        elif day == 7:
            return f"""Hi {member_name}! 🎯

You've been with us for a week! We'd love to hear about your experience:

📊 **Quick Check-in:**
- Have you been able to set up your development environment?
- Have you explored the codebase or documentation?
- Are there any areas where you'd like more guidance?

🚀 **Contribution Opportunities:**
- Good first issues: https://github.com/aitbc/aitbc/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22
- Documentation improvements: {resources["documentation"]}/contribute
- Plugin ideas: {resources["plugin_development"]}/ideas

🎉 **Community Events:**
- Monthly hackathon (first Saturday)
- Plugin showcase (third Thursday)
- Office hours (every Tuesday 2PM UTC)

Your feedback helps us improve the onboarding experience. What would make your journey more successful?

#AITBCCommunity #Feedback #Community"""

        elif day == 14:
            return f"""Hi {member_name}! 🌟

Two weeks in - you're becoming part of the AITBC ecosystem!

🎯 **Next Level Engagement:**
- Consider joining a specialized team (security, plugins, docs, etc.)
- Start a plugin project: {resources["plugin_development"]}/starter
- Review a pull request to learn the codebase
- Share your ideas in #feature-requests

🏆 **Recognition Program:**
- Contributor of the month nominations
- Plugin contest participation
- Community spotlight features
- Speaking opportunities at community events

📈 **Your Impact:**
- Every contribution, no matter how small, helps
- Your questions help us improve documentation
- Your feedback shapes the project direction
- Your presence strengthens the community

What would you like to focus on next? We're here to support your journey!

#AITBCCommunity #Growth #Impact"""

        else:
            return f"Hi {member_name}! Just checking in. How's your AITBC journey going?"

    async def _send_discord_follow_up(self, member_id: str, message: str) -> bool:
        """Send follow-up via Discord DM (currently a logging stub)."""
        try:
            self.logger.info(f"Discord follow-up for {member_id}: {message[:100]}...")
            # Discord DM implementation
            return True
        except Exception as e:
            self.logger.error(f"Error sending Discord follow-up: {e}")
            return False

    async def _send_email_follow_up(self, member_id: str, message: str) -> bool:
        """Send follow-up via email (currently a logging stub)."""
        try:
            self.logger.info(f"Email follow-up for {member_id}: {message[:100]}...")
            # Email implementation
            return True
        except Exception as e:
            self.logger.error(f"Error sending email follow-up: {e}")
            return False

    async def track_member_activity(self, member_id: str, activity_type: str,
                                    details: Optional[Dict] = None) -> None:
        """Track member activity for analytics.

        Appends an activity entry to the member record (silently ignoring
        unknown member ids), promotes the member's status on milestone
        activities, persists, and emits an analytics event.
        """
        try:
            if member_id not in self.onboarding_data["members"]:
                return

            member_data = self.onboarding_data["members"][member_id]

            # Lazily create the activities list for records written before
            # this field existed.
            if "activities" not in member_data:
                member_data["activities"] = []

            activity = {
                "type": activity_type,
                "timestamp": datetime.now().isoformat(),
                "details": details or {}
            }

            member_data["activities"].append(activity)

            # Update member status based on activity milestones.
            if activity_type == "first_contribution":
                member_data["status"] = "contributor"
            elif activity_type == "first_plugin":
                member_data["status"] = "plugin_developer"

            self._save_onboarding_data()
            await self._track_member_analytics(member_id, activity_type)

        except Exception as e:
            self.logger.error(f"Error tracking activity for {member_id}: {e}")

    async def _track_member_analytics(self, member_id: str, event: str) -> None:
        """Track analytics for member events (currently a logging stub)."""
        try:
            # Analytics implementation would go here.
            self.logger.info(f"Analytics event: {member_id} - {event}")

            # In production, send to analytics service
            # await analytics_service.track_event({
            #     "member_id": member_id,
            #     "event": event,
            #     "timestamp": datetime.now().isoformat(),
            #     "properties": {}
            # })

        except Exception as e:
            self.logger.error(f"Error tracking analytics: {e}")

    async def process_follow_ups(self) -> None:
        """Process scheduled follow-ups for all members.

        For every member, sends each configured day-N follow-up whose due
        date (joined_at + N days) has passed and which has not been sent yet.
        """
        try:
            current_date = datetime.now()

            for member_id, member_data in self.onboarding_data["members"].items():
                joined_date = datetime.fromisoformat(member_data["joined_at"])

                for day in self.config["onboarding"]["follow_up_days"]:
                    follow_up_date = joined_date + timedelta(days=day)

                    if (current_date >= follow_up_date and
                            day not in member_data["follow_ups_sent"]):
                        await self.send_follow_up_message(member_id, day)

        except Exception as e:
            self.logger.error(f"Error processing follow-ups: {e}")

    async def generate_onboarding_report(self) -> Dict:
        """Generate onboarding analytics report.

        Returns totals, the welcome-sent rate, and status/platform
        distributions; returns {} on error.
        """
        try:
            total_members = len(self.onboarding_data["members"])
            welcome_sent = sum(1 for m in self.onboarding_data["members"].values() if m.get("welcome_sent"))

            status_counts = {}
            for member in self.onboarding_data["members"].values():
                status = member.get("status", "new")
                status_counts[status] = status_counts.get(status, 0) + 1

            platform_counts = {}
            for member in self.onboarding_data["members"].values():
                platform = member.get("platform", "unknown")
                platform_counts[platform] = platform_counts.get(platform, 0) + 1

            return {
                "total_members": total_members,
                "welcome_sent": welcome_sent,
                "welcome_rate": welcome_sent / total_members if total_members > 0 else 0,
                "status_distribution": status_counts,
                "platform_distribution": platform_counts,
                "generated_at": datetime.now().isoformat()
            }

        except Exception as e:
            self.logger.error(f"Error generating report: {e}")
            return {}

    async def run_daily_tasks(self) -> None:
        """Run daily onboarding tasks: follow-ups, reporting, data cleanup."""
        try:
            self.logger.info("Running daily onboarding tasks")

            # Process follow-ups
            await self.process_follow_ups()

            # Generate daily report
            report = await self.generate_onboarding_report()
            self.logger.info(f"Daily onboarding report: {report}")

            # Cleanup old data
            await self._cleanup_old_data()

        except Exception as e:
            self.logger.error(f"Error running daily tasks: {e}")

    async def _cleanup_old_data(self) -> None:
        """Clean up old onboarding data.

        Removes members who joined more than a year ago, recorded no
        activities, and are still in status "new"; persists only when
        something was removed.
        """
        try:
            cutoff_date = datetime.now() - timedelta(days=365)

            # Remove members older than 1 year with no activity.
            # Collect first, then delete — never mutate a dict while iterating.
            to_remove = []
            for member_id, member_data in self.onboarding_data["members"].items():
                joined_date = datetime.fromisoformat(member_data["joined_at"])

                if (joined_date < cutoff_date and
                        not member_data.get("activities") and
                        member_data.get("status") == "new"):
                    to_remove.append(member_id)

            for member_id in to_remove:
                del self.onboarding_data["members"][member_id]
                self.logger.info(f"Removed inactive member: {member_id}")

            if to_remove:
                self._save_onboarding_data()

        except Exception as e:
            self.logger.error(f"Error cleaning up data: {e}")
||||
|
||||
# CLI interface for the onboarding system
async def main() -> None:
    """Parse CLI flags and dispatch to the onboarding system.

    Exactly one action runs per invocation (first match wins):
      --welcome member_id,name,platform   welcome a new member
      --followup member_id,day            send the day-N follow-up
      --report                            print the onboarding report as JSON
      --daily                             run daily maintenance tasks
    """
    import argparse

    parser = argparse.ArgumentParser(description="AITBC Community Onboarding")
    parser.add_argument("--welcome", help="Welcome new member (member_id,name,platform)")
    parser.add_argument("--followup", help="Send follow-up (member_id,day)")
    parser.add_argument("--report", action="store_true", help="Generate onboarding report")
    parser.add_argument("--daily", action="store_true", help="Run daily tasks")

    args = parser.parse_args()

    onboarding = CommunityOnboarding()

    if args.welcome:
        # Robustness fix: a malformed value previously escaped as a raw
        # ValueError traceback from tuple unpacking; report a usage error.
        parts = args.welcome.split(",")
        if len(parts) != 3:
            parser.error("--welcome expects 'member_id,name,platform'")
        member_id, name, platform = parts
        await onboarding.welcome_new_member(member_id, name, platform)
        print(f"Welcome message scheduled for {name}")

    elif args.followup:
        parts = args.followup.split(",")
        if len(parts) != 2 or not parts[1].strip().isdigit():
            parser.error("--followup expects 'member_id,day' with an integer day")
        member_id, day = parts
        success = await onboarding.send_follow_up_message(member_id, int(day))
        print(f"Follow-up sent: {success}")

    elif args.report:
        report = await onboarding.generate_onboarding_report()
        print(json.dumps(report, indent=2))

    elif args.daily:
        await onboarding.run_daily_tasks()
        print("Daily tasks completed")

    else:
        print("Use --help to see available options")


if __name__ == "__main__":
    asyncio.run(main())
|
||||
355
dev/scripts/development/dotenv_linter.py
Executable file
355
dev/scripts/development/dotenv_linter.py
Executable file
@@ -0,0 +1,355 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Dotenv Linter for AITBC
|
||||
|
||||
This script checks for configuration drift between .env.example and actual
|
||||
environment variable usage in the codebase. It ensures that all environment
|
||||
variables used in the code are documented in .env.example and vice versa.
|
||||
|
||||
Usage:
|
||||
python scripts/dotenv_linter.py
|
||||
python scripts/dotenv_linter.py --fix
|
||||
python scripts/dotenv_linter.py --verbose
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import argparse
|
||||
from pathlib import Path
|
||||
from typing import Set, Dict, List, Tuple
|
||||
import ast
|
||||
import subprocess
|
||||
|
||||
|
||||
class DotenvLinter:
|
||||
"""Linter for .env files and environment variable usage."""
|
||||
|
||||
def __init__(self, project_root: Path = None):
|
||||
"""Initialize the linter."""
|
||||
self.project_root = project_root or Path(__file__).parent.parent
|
||||
self.env_example_path = self.project_root / ".env.example"
|
||||
self.python_files = self._find_python_files()
|
||||
|
||||
def _find_python_files(self) -> List[Path]:
|
||||
"""Find all Python files in the project."""
|
||||
python_files = []
|
||||
for root, dirs, files in os.walk(self.project_root):
|
||||
# Skip hidden directories and common exclusions
|
||||
dirs[:] = [d for d in dirs if not d.startswith('.') and d not in {
|
||||
'__pycache__', 'node_modules', '.git', 'venv', 'env', '.venv'
|
||||
}]
|
||||
|
||||
for file in files:
|
||||
if file.endswith('.py'):
|
||||
python_files.append(Path(root) / file)
|
||||
|
||||
return python_files
|
||||
|
||||
def _parse_env_example(self) -> Set[str]:
|
||||
"""Parse .env.example and extract all environment variable keys."""
|
||||
env_vars = set()
|
||||
|
||||
if not self.env_example_path.exists():
|
||||
print(f"❌ .env.example not found at {self.env_example_path}")
|
||||
return env_vars
|
||||
|
||||
with open(self.env_example_path, 'r') as f:
|
||||
for line_num, line in enumerate(f, 1):
|
||||
line = line.strip()
|
||||
|
||||
# Skip comments and empty lines
|
||||
if not line or line.startswith('#'):
|
||||
continue
|
||||
|
||||
# Extract variable name (everything before =)
|
||||
if '=' in line:
|
||||
var_name = line.split('=')[0].strip()
|
||||
if var_name:
|
||||
env_vars.add(var_name)
|
||||
|
||||
return env_vars
|
||||
|
||||
def _find_env_usage_in_python(self) -> Set[str]:
|
||||
"""Find all environment variable usage in Python files."""
|
||||
env_vars = set()
|
||||
|
||||
# Patterns to search for
|
||||
patterns = [
|
||||
r'os\.environ\.get\([\'"]([^\'"]+)[\'"]',
|
||||
r'os\.environ\[([\'"]([^\'"]+)[\'"])\]',
|
||||
r'os\.getenv\([\'"]([^\'"]+)[\'"]',
|
||||
r'getenv\([\'"]([^\'"]+)[\'"]',
|
||||
r'environ\.get\([\'"]([^\'"]+)[\'"]',
|
||||
r'environ\[([\'"]([^\'"]+)[\'"])\]',
|
||||
]
|
||||
|
||||
for python_file in self.python_files:
|
||||
try:
|
||||
with open(python_file, 'r', encoding='utf-8') as f:
|
||||
content = f.read()
|
||||
|
||||
for pattern in patterns:
|
||||
matches = re.finditer(pattern, content)
|
||||
for match in matches:
|
||||
var_name = match.group(1)
|
||||
env_vars.add(var_name)
|
||||
|
||||
except (UnicodeDecodeError, PermissionError) as e:
|
||||
print(f"⚠️ Could not read {python_file}: {e}")
|
||||
|
||||
return env_vars
|
||||
|
||||
def _find_env_usage_in_config_files(self) -> Set[str]:
|
||||
"""Find environment variable usage in configuration files."""
|
||||
env_vars = set()
|
||||
|
||||
# Check common config files
|
||||
config_files = [
|
||||
'pyproject.toml',
|
||||
'pytest.ini',
|
||||
'setup.cfg',
|
||||
'tox.ini',
|
||||
'.github/workflows/*.yml',
|
||||
'.github/workflows/*.yaml',
|
||||
'docker-compose.yml',
|
||||
'docker-compose.yaml',
|
||||
'Dockerfile',
|
||||
]
|
||||
|
||||
for pattern in config_files:
|
||||
for config_file in self.project_root.glob(pattern):
|
||||
try:
|
||||
with open(config_file, 'r', encoding='utf-8') as f:
|
||||
content = f.read()
|
||||
|
||||
# Look for environment variable patterns
|
||||
env_patterns = [
|
||||
r'\${([A-Z_][A-Z0-9_]*)}', # ${VAR_NAME}
|
||||
r'\$([A-Z_][A-Z0-9_]*)', # $VAR_NAME
|
||||
r'env\.([A-Z_][A-Z0-9_]*)', # env.VAR_NAME
|
||||
r'os\.environ\([\'"]([^\'"]+)[\'"]', # os.environ("VAR_NAME")
|
||||
r'getenv\([\'"]([^\'"]+)[\'"]', # getenv("VAR_NAME")
|
||||
]
|
||||
|
||||
for env_pattern in env_patterns:
|
||||
matches = re.finditer(env_pattern, content)
|
||||
for match in matches:
|
||||
var_name = match.group(1) if match.groups() else match.group(0)
|
||||
if var_name.isupper():
|
||||
env_vars.add(var_name)
|
||||
|
||||
except (UnicodeDecodeError, PermissionError) as e:
|
||||
print(f"⚠️ Could not read {config_file}: {e}")
|
||||
|
||||
return env_vars
|
||||
|
||||
def _find_env_usage_in_shell_scripts(self) -> Set[str]:
|
||||
"""Find environment variable usage in shell scripts."""
|
||||
env_vars = set()
|
||||
|
||||
shell_files = []
|
||||
for root, dirs, files in os.walk(self.project_root):
|
||||
dirs[:] = [d for d in dirs if not d.startswith('.') and d not in {
|
||||
'__pycache__', 'node_modules', '.git', 'venv', 'env', '.venv'
|
||||
}]
|
||||
|
||||
for file in files:
|
||||
if file.endswith(('.sh', '.bash', '.zsh')):
|
||||
shell_files.append(Path(root) / file)
|
||||
|
||||
for shell_file in shell_files:
|
||||
try:
|
||||
with open(shell_file, 'r', encoding='utf-8') as f:
|
||||
content = f.read()
|
||||
|
||||
# Look for environment variable patterns in shell scripts
|
||||
patterns = [
|
||||
r'\$\{([A-Z_][A-Z0-9_]*)\}', # ${VAR_NAME}
|
||||
r'\$([A-Z_][A-Z0-9_]*)', # $VAR_NAME
|
||||
r'export\s+([A-Z_][A-Z0-9_]*)=', # export VAR_NAME=
|
||||
r'([A-Z_][A-Z0-9_]*)=', # VAR_NAME=
|
||||
]
|
||||
|
||||
for pattern in patterns:
|
||||
matches = re.finditer(pattern, content)
|
||||
for match in matches:
|
||||
var_name = match.group(1)
|
||||
env_vars.add(var_name)
|
||||
|
||||
except (UnicodeDecodeError, PermissionError) as e:
|
||||
print(f"⚠️ Could not read {shell_file}: {e}")
|
||||
|
||||
return env_vars
|
||||
|
||||
def _find_all_env_usage(self) -> Set[str]:
|
||||
"""Find all environment variable usage across the project."""
|
||||
all_vars = set()
|
||||
|
||||
# Python files
|
||||
python_vars = self._find_env_usage_in_python()
|
||||
all_vars.update(python_vars)
|
||||
|
||||
# Config files
|
||||
config_vars = self._find_env_usage_in_config_files()
|
||||
all_vars.update(config_vars)
|
||||
|
||||
# Shell scripts
|
||||
shell_vars = self._find_env_usage_in_shell_scripts()
|
||||
all_vars.update(shell_vars)
|
||||
|
||||
return all_vars
|
||||
|
||||
def _check_missing_in_example(self, used_vars: Set[str], example_vars: Set[str]) -> Set[str]:
|
||||
"""Find variables used in code but missing from .env.example."""
|
||||
missing = used_vars - example_vars
|
||||
|
||||
# Filter out common system variables that don't need to be in .env.example
|
||||
system_vars = {
|
||||
'PATH', 'HOME', 'USER', 'SHELL', 'TERM', 'LANG', 'LC_ALL',
|
||||
'PYTHONPATH', 'PYTHONHOME', 'VIRTUAL_ENV', 'CONDA_DEFAULT_ENV',
|
||||
'GITHUB_ACTIONS', 'CI', 'TRAVIS', 'APPVEYOR', 'CIRCLECI',
|
||||
'HTTP_PROXY', 'HTTPS_PROXY', 'NO_PROXY', 'http_proxy', 'https_proxy',
|
||||
'PWD', 'OLDPWD', 'SHLVL', '_', 'HOSTNAME', 'HOSTTYPE', 'OSTYPE',
|
||||
'MACHTYPE', 'UID', 'GID', 'EUID', 'EGID', 'PS1', 'PS2', 'IFS',
|
||||
'DISPLAY', 'XAUTHORITY', 'DBUS_SESSION_BUS_ADDRESS', 'SSH_AUTH_SOCK',
|
||||
'SSH_CONNECTION', 'SSH_CLIENT', 'SSH_TTY', 'LOGNAME', 'USERNAME'
|
||||
}
|
||||
|
||||
return missing - system_vars
|
||||
|
||||
def _check_unused_in_example(self, used_vars: Set[str], example_vars: Set[str]) -> Set[str]:
|
||||
"""Find variables in .env.example but not used in code."""
|
||||
unused = example_vars - used_vars
|
||||
|
||||
# Filter out variables that might be used by external tools or services
|
||||
external_vars = {
|
||||
'NODE_ENV', 'NPM_CONFIG_PREFIX', 'NPM_AUTH_TOKEN',
|
||||
'DOCKER_HOST', 'DOCKER_TLS_VERIFY', 'DOCKER_CERT_PATH',
|
||||
'KUBERNETES_SERVICE_HOST', 'KUBERNETES_SERVICE_PORT',
|
||||
'REDIS_URL', 'MEMCACHED_URL', 'ELASTICSEARCH_URL',
|
||||
'SENTRY_DSN', 'ROLLBAR_ACCESS_TOKEN', 'HONEYBADGER_API_KEY'
|
||||
}
|
||||
|
||||
return unused - external_vars
|
||||
|
||||
def lint(self, verbose: bool = False) -> Tuple[int, int, int, Set[str], Set[str]]:
    """Run the linter and return results.

    Returns:
        (example count, used count, missing count,
         missing variable names, unused variable names)
    """
    def _dump(names: Set[str]) -> None:
        # Verbose helper: one sorted, comma-separated line.
        if names:
            print(f" {', '.join(sorted(names))}")

    print("🔍 Dotenv Linter for AITBC")
    print("=" * 50)

    # What .env.example documents today.
    example_vars = self._parse_env_example()
    if verbose:
        print(f"📄 Found {len(example_vars)} variables in .env.example")
        _dump(example_vars)

    # What the codebase actually reads.
    used_vars = self._find_all_env_usage()
    if verbose:
        print(f"🔍 Found {len(used_vars)} variables used in code")
        _dump(used_vars)

    # Drift in both directions.
    missing_vars = self._check_missing_in_example(used_vars, example_vars)
    unused_vars = self._check_unused_in_example(used_vars, example_vars)

    return len(example_vars), len(used_vars), len(missing_vars), missing_vars, unused_vars
|
||||
|
||||
def fix_env_example(self, missing_vars: Set[str], verbose: bool = False):
    """Add missing variables to .env.example.

    Appends a marked section with empty placeholder values; a no-op
    when there is nothing to add.
    """
    if not missing_vars:
        if verbose:
            print("✅ No missing variables to add")
        return

    print(f"🔧 Adding {len(missing_vars)} missing variables to .env.example")

    # Append in deterministic (sorted) order under a clear marker.
    placeholder_lines = [f"{var}=\n" for var in sorted(missing_vars)]
    with open(self.env_example_path, 'a') as f:
        f.write("\n# Auto-generated variables (added by dotenv_linter)\n")
        f.writelines(placeholder_lines)

    print(f"✅ Added {len(missing_vars)} variables to .env.example")
|
||||
|
||||
def generate_report(self, example_count: int, used_count: int, missing_count: int,
                    missing_vars: Set[str], unused_vars: Set[str]) -> str:
    """Generate a detailed report.

    Returns a multi-line string: summary counts, then the missing and
    unused variable lists (when non-empty), or a success line.
    """
    lines = [
        "📊 Dotenv Linter Report",
        "=" * 50,
        f"Variables in .env.example: {example_count}",
        f"Variables used in code: {used_count}",
        f"Missing from .env.example: {missing_count}",
        f"Unused in .env.example: {len(unused_vars)}",
        "",
    ]

    if missing_vars:
        lines.append("❌ Missing Variables (used in code but not in .env.example):")
        lines.extend(f" - {var}" for var in sorted(missing_vars))
        lines.append("")

    if unused_vars:
        lines.append("⚠️ Unused Variables (in .env.example but not used in code):")
        lines.extend(f" - {var}" for var in sorted(unused_vars))
        lines.append("")

    if not missing_vars and not unused_vars:
        lines.append("✅ No configuration drift detected!")

    return "\n".join(lines)
|
||||
|
||||
|
||||
def main():
    """Main entry point.

    Parses CLI flags, runs the linter, prints the drift report,
    optionally appends missing variables to .env.example (--fix),
    and exits with status 1 when --check is set and drift was found.
    """
    parser = argparse.ArgumentParser(
        description="Dotenv Linter for AITBC - Check for configuration drift",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
python scripts/dotenv_linter.py # Check for drift
python scripts/dotenv_linter.py --verbose # Verbose output
python scripts/dotenv_linter.py --fix # Auto-fix missing variables
python scripts/dotenv_linter.py --check # Exit with error code on issues
"""
    )

    parser.add_argument("--verbose", "-v", action="store_true", help="Verbose output")
    parser.add_argument("--fix", action="store_true", help="Auto-fix missing variables in .env.example")
    parser.add_argument("--check", action="store_true", help="Exit with error code if issues found")

    args = parser.parse_args()

    # Initialize linter (scans the project rooted at the script's parent).
    linter = DotenvLinter()

    # Run linting
    example_count, used_count, missing_count, missing_vars, unused_vars = linter.lint(args.verbose)

    # Generate report
    report = linter.generate_report(example_count, used_count, missing_count, missing_vars, unused_vars)
    print(report)

    # Auto-fix if requested. Note: the fix runs AFTER the report, so
    # the report reflects the pre-fix state of .env.example.
    if args.fix and missing_vars:
        linter.fix_env_example(missing_vars, args.verbose)

    # Exit with error code if check requested and issues found
    # (--check fails even when --fix just repaired the missing vars).
    if args.check and (missing_vars or unused_vars):
        print(f"❌ Configuration drift detected: {missing_count} missing, {len(unused_vars)} unused")
        sys.exit(1)

    # Success
    print("✅ Dotenv linter completed successfully")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
151
dev/scripts/development/exchange-router-fixed.py
Normal file
151
dev/scripts/development/exchange-router-fixed.py
Normal file
@@ -0,0 +1,151 @@
|
||||
"""
|
||||
Bitcoin Exchange Router for AITBC
|
||||
"""
|
||||
|
||||
from typing import Dict, Any
|
||||
from fastapi import APIRouter, HTTPException, BackgroundTasks
|
||||
from sqlmodel import Session
|
||||
import uuid
|
||||
import time
|
||||
import json
|
||||
import os
|
||||
|
||||
from ..deps import require_admin_key, require_client_key
|
||||
from ..domain import Wallet
|
||||
from ..schemas import ExchangePaymentRequest, ExchangePaymentResponse
|
||||
|
||||
# Router mounted by the parent FastAPI app; all routes tagged "exchange".
router = APIRouter(tags=["exchange"])

# In-memory storage for demo (use database in production)
# NOTE(review): state is per-process and lost on restart; with multiple
# workers each process would see a different `payments` dict.
payments: Dict[str, Dict] = {}

# Bitcoin configuration
BITCOIN_CONFIG = {
    'testnet': True,
    'main_address': 'tb1qxy2kgdygjrsqtzq2n0yrf2493p83kkfjhx0wlh',  # Testnet address
    'exchange_rate': 100000,  # 1 BTC = 100,000 AITBC
    'min_confirmations': 1,
    'payment_timeout': 3600  # 1 hour
}
|
||||
|
||||
@router.post("/exchange/create-payment", response_model=ExchangePaymentResponse)
async def create_payment(
    request: ExchangePaymentRequest,
    background_tasks: BackgroundTasks,
    api_key: str = require_client_key()
) -> Dict[str, Any]:
    """Create a new Bitcoin payment request.

    Validates the requested AITBC/BTC amounts against the fixed
    exchange rate, records a pending payment in the in-memory store,
    and schedules background monitoring.

    Raises:
        HTTPException 400: non-positive amounts, or the BTC amount
            does not match the configured exchange rate.
    """

    # Validate request: both sides of the trade must be positive.
    if request.aitbc_amount <= 0 or request.btc_amount <= 0:
        raise HTTPException(status_code=400, detail="Invalid amount")

    # Calculate expected BTC amount from the fixed rate.
    expected_btc = request.aitbc_amount / BITCOIN_CONFIG['exchange_rate']

    # Allow small difference for rounding (1e-8 BTC = 1 satoshi).
    # NOTE(review): float comparison for money — consider Decimal.
    if abs(request.btc_amount - expected_btc) > 0.00000001:
        raise HTTPException(status_code=400, detail="Amount mismatch")

    # Create payment record keyed by a fresh UUID.
    payment_id = str(uuid.uuid4())
    payment = {
        'payment_id': payment_id,
        'user_id': request.user_id,
        'aitbc_amount': request.aitbc_amount,
        'btc_amount': request.btc_amount,
        # Single shared receive address from config — not per-payment.
        'payment_address': BITCOIN_CONFIG['main_address'],
        'status': 'pending',
        'created_at': int(time.time()),
        'expires_at': int(time.time()) + BITCOIN_CONFIG['payment_timeout'],
        'confirmations': 0,
        'tx_hash': None
    }

    # Store payment (in-memory demo store; not persistent).
    payments[payment_id] = payment

    # Start payment monitoring in background
    background_tasks.add_task(monitor_payment, payment_id)

    return payment
|
||||
|
||||
@router.get("/exchange/payment-status/{payment_id}")
async def get_payment_status(payment_id: str) -> Dict[str, Any]:
    """Get payment status.

    Looks the payment up in the in-memory store; a pending payment
    past its deadline is lazily marked 'expired' on read.

    Raises:
        HTTPException 404: unknown payment_id.
    """

    if payment_id not in payments:
        raise HTTPException(status_code=404, detail="Payment not found")

    payment = payments[payment_id]

    # Check if expired — the stored record is mutated in place on the
    # first read after the deadline.
    if payment['status'] == 'pending' and time.time() > payment['expires_at']:
        payment['status'] = 'expired'

    return payment
|
||||
|
||||
@router.post("/exchange/confirm-payment/{payment_id}")
async def confirm_payment(
    payment_id: str,
    tx_hash: str,
    api_key: str = require_admin_key()
) -> Dict[str, Any]:
    """Confirm payment (webhook from payment processor).

    Admin-only: marks a pending payment confirmed, records the
    Bitcoin transaction hash, and mints the corresponding AITBC
    amount to the buyer's wallet.

    Raises:
        HTTPException 404: unknown payment_id.
        HTTPException 400: payment is not in the 'pending' state.
    """

    if payment_id not in payments:
        raise HTTPException(status_code=404, detail="Payment not found")

    payment = payments[payment_id]

    if payment['status'] != 'pending':
        raise HTTPException(status_code=400, detail="Payment not in pending state")

    # Verify transaction (in production, verify with blockchain API)
    # For demo, we'll accept any tx_hash

    payment['status'] = 'confirmed'
    payment['tx_hash'] = tx_hash
    payment['confirmed_at'] = int(time.time())

    # Mint AITBC tokens to user's wallet
    try:
        from ..services.blockchain import mint_tokens
        await mint_tokens(payment['user_id'], payment['aitbc_amount'])
    except Exception as e:
        # NOTE(review): a mint failure is swallowed and 'ok' is still
        # returned below — the payer could pay without receiving tokens.
        print(f"Error minting tokens: {e}")
        # In production, handle this error properly

    return {
        'status': 'ok',
        'payment_id': payment_id,
        'aitbc_amount': payment['aitbc_amount']
    }
|
||||
|
||||
@router.get("/exchange/rates")
async def get_exchange_rates() -> Dict[str, float]:
    """Get current exchange rates"""
    # Both directions derived from the single configured BTC→AITBC rate.
    rate = BITCOIN_CONFIG['exchange_rate']
    return {
        'btc_to_aitbc': rate,
        'aitbc_to_btc': 1.0 / rate,
        'fee_percent': 0.5
    }
|
||||
|
||||
async def monitor_payment(payment_id: str):
    """Monitor payment for confirmation (background task).

    Polls every 30 seconds while the payment is still in the store,
    marking it 'expired' (and stopping) once the deadline passes.

    NOTE(review): the loop only breaks on expiry — a payment that is
    confirmed keeps this task polling until the entry is removed from
    `payments`; confirm-then-stop handling is missing.
    """

    import asyncio

    while payment_id in payments:
        payment = payments[payment_id]

        # Check if expired
        if payment['status'] == 'pending' and time.time() > payment['expires_at']:
            payment['status'] = 'expired'
            break

        # In production, check blockchain for payment
        # For demo, we'll wait for manual confirmation

        await asyncio.sleep(30)  # Check every 30 seconds
|
||||
418
dev/scripts/development/focused_dotenv_linter.py
Executable file
418
dev/scripts/development/focused_dotenv_linter.py
Executable file
@@ -0,0 +1,418 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Focused Dotenv Linter for AITBC
|
||||
|
||||
This script specifically checks for environment variable usage patterns that
|
||||
actually require .env.example documentation, filtering out script variables and
|
||||
other non-environment variable patterns.
|
||||
|
||||
Usage:
|
||||
python scripts/focused_dotenv_linter.py
|
||||
python scripts/focused_dotenv_linter.py --fix
|
||||
python scripts/focused_dotenv_linter.py --verbose
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import argparse
|
||||
from pathlib import Path
|
||||
from typing import Set, Dict, List, Tuple
|
||||
import ast
|
||||
|
||||
|
||||
class FocusedDotenvLinter:
|
||||
"""Focused linter for actual environment variable usage."""
|
||||
|
||||
def __init__(self, project_root: Path = None):
    """Initialize the linter.

    Args:
        project_root: Base directory to scan; defaults to the parent
            of this script's directory.
    """
    self.project_root = project_root or Path(__file__).parent.parent
    # The single file where project env vars should be documented.
    self.env_example_path = self.project_root / ".env.example"
    # Collected eagerly at construction time (walks the whole tree).
    self.python_files = self._find_python_files()

    # Common script/internal variables to ignore
    # Heuristic allowlist: shell-script locals, CI/tooling names, and
    # system variables the regex scanners would otherwise flag.
    # NOTE(review): a few entries repeat ('NC', 'NODE_ENV', 'NEW_DIFF');
    # harmless in a set literal but worth deduplicating.
    self.script_vars = {
        'PID', 'PIDS', 'PID_FILE', 'CHILD_PIDS', 'API_PID', 'COORD_PID', 'MARKET_PID',
        'EXCHANGE_PID', 'NODE_PID', 'API_STATUS', 'FRONTEND_STATUS', 'CONTRACTS_STATUS',
        'NODE1_HEIGHT', 'NODE2_HEIGHT', 'NODE3_HEIGHT', 'NEW_NODE1_HEIGHT',
        'NEW_NODE2_HEIGHT', 'NEW_NODE3_HEIGHT', 'NODE3_STATUS', 'NODE3_NEW_STATUS',
        'OLD_DIFF', 'NEW_DIFF', 'DIFF12', 'DIFF23', 'NEW_DIFF', 'DIFF',
        'COVERAGE', 'MYTHRIL_REPORT', 'MYTHRIL_TEXT', 'SLITHER_REPORT', 'SLITHER_TEXT',
        'GITHUB_OUTPUT', 'GITHUB_PATH', 'GITHUB_STEP_SUMMARY', 'PYTEST_CURRENT_TEST',
        'NC', 'REPLY', 'RUNNER', 'TIMESTAMP', 'DATE', 'VERSION', 'SCRIPT_VERSION',
        'VERBOSE', 'DEBUG', 'DRY_RUN', 'AUTO_MODE', 'DEV_MODE', 'TEST_MODE',
        'PRODUCTION_MODE', 'ENVIRONMENT', 'APP_ENV', 'NODE_ENV', 'LIVE_SERVER',
        'LOCAL_MODEL_PATH', 'FASTTEXT_MODEL_PATH', 'BUILD_DIR', 'OUTPUT_DIR',
        'TEMP_DIR', 'TEMP_DEPLOY_DIR', 'BACKUP_DIR', 'BACKUP_FILE', 'BACKUP_NAME',
        'LOG_DIR', 'MONITORING_DIR', 'REPORT_DIR', 'DOCS_DIR', 'SCRIPTS_DIR',
        'SCRIPT_DIR', 'CONFIG_DIR', 'CONFIGS_DIR', 'CONFIGS', 'PACKAGES_DIR',
        'SERVICES_DIR', 'CONTRACTS_DIR', 'INFRA_DIR', 'FRONTEND_DIR', 'EXCHANGE_DIR',
        'EXPLORER_DIR', 'ROOT_DIR', 'PROJECT_ROOT', 'PROJECT_DIR', 'SOURCE_DIR',
        'VENV_DIR', 'INSTALL_DIR', 'DEBIAN_DIR', 'DEB_OUTPUT_DIR', 'DIST_DIR',
        'LEGACY_DIR', 'MIGRATION_EXAMPLES_DIR', 'GPU_ACCEL_DIR', 'ZK_DIR',
        'WHEEL_FILE', 'PACKAGE_FILE', 'PACKAGE_NAME', 'PACKAGE_VERSION', 'PACKAGE_PATH',
        'PACKAGE_SIZE', 'PKG_NAME', 'PKG_VERSION', 'PKG_PATH', 'PKG_IDENTIFIER',
        'PKG_INSTALL_LOCATION', 'PKG_MANAGER', 'PKG_PATHS', 'CUSTOM_PACKAGES',
        'SELECTED_PACKAGES', 'COMPONENTS', 'PHASES', 'REQUIRED_VERSION',
        'SCRIPTS', 'SERVICES', 'SERVERS', 'CONTAINER', 'CONTAINER_NAME', 'CONTAINER_IP',
        'DOMAIN', 'PORT', 'HOST', 'SERVER', 'SERVICE_NAME', 'NAMESPACE',
        'CLIENT_ID', 'CLIENT_REGION', 'CLIENT_KEY', 'CLIENT_WALLET', 'MINER_ID',
        'MINER_REGION', 'MINER_KEY', 'MINER_WALLET', 'AGENT_TYPE', 'CATEGORY',
        'NETWORK', 'CHAIN', 'CHAINS', 'CHAIN_ID', 'SUPPORTED_CHAINS',
        'NODE1', 'NODE2', 'NODE3', 'NODE_MAP', 'NODE1_CONFIG', 'NODE1_DIR',
        'NODE2_DIR', 'NODE3_DIR', 'NODE_ENV', 'PLATFORM', 'ARCH', 'ARCH_NAME',
        'CHIP_FAMILY', 'PYTHON_VERSION', 'BASH_VERSION', 'ZSH_VERSION',
        'DEBIAN_VERSION', 'SHELL_PROFILE', 'SHELL_RC', 'POWERSHELL_PROFILE',
        'SYSTEMD_PATH', 'WSL_SCRIPT_DIR', 'SSH_KEY', 'SSH_USER', 'SSL_CERT_PATH',
        'SSL_KEY_PATH', 'SSL_ENABLED', 'NGINX_CONFIG', 'WEB_ROOT', 'WEBHOOK_SECRET',
        'WORKERS', 'AUTO_SCALING', 'MAX_INSTANCES', 'MIN_INSTANCES', 'EMERGENCY_ONLY',
        'SKIP_BUILD', 'SKIP_TESTS', 'SKIP_SECURITY', 'SKIP_MONITORING', 'SKIP_VERIFICATION',
        'SKIP_FRONTEND', 'RESET', 'UPDATE', 'UPDATE_ALL', 'UPDATE_CLI', 'UPDATE_SERVICES',
        'INSTALL_CLI', 'INSTALL_SERVICES', 'UNINSTALL', 'UNINSTALL_CLI_ONLY',
        'UNINSTALL_SERVICES_ONLY', 'DEPLOY_CONTRACTS', 'DEPLOY_FRONTEND', 'DEPLOY_SERVICES',
        'BACKUP_BEFORE_DEPLOY', 'DEPLOY_PATH', 'COMPLETE_INSTALL', 'DIAGNOSE',
        'HEALTH_CHECK', 'HEALTH_URL', 'RUN_MYTHRIL', 'RUN_SLITHER', 'TEST_CONTRACTS',
        'VERIFY_CONTRACTS', 'SEND_AMOUNT', 'RETURN_ADDRESS', 'TXID', 'BALANCE',
        'MINT_PER_UNIT', 'MIN_CONFIRMATIONS', 'PRODUCTION_GAS_LIMIT', 'PRODUCTION_GAS_PRICE',
        'PRIVATE_KEY', 'PRODUCTION_PRIVATE_KEY', 'PROPOSER_KEY', 'ENCRYPTION_KEY',
        'BITCOIN_ADDRESS', 'BITCOIN_PRIVATE_KEY', 'BITCOIN_TESTNET', 'BTC_TO_AITBC_RATE',
        'VITE_APP_NAME', 'VITE_APP_VERSION', 'VITE_APP_DESCRIPTION', 'VITE_NETWORK_NAME',
        'VITE_CHAIN_ID', 'VITE_RPC_URL', 'VITE_WS_URL', 'VITE_API_BASE_URL',
        'VITE_ENABLE_ANALYTICS', 'VITE_ENABLE_ERROR_REPORTING', 'VITE_SENTRY_DSN',
        'VITE_AGENT_BOUNTY_ADDRESS', 'VITE_AGENT_STAKING_ADDRESS', 'VITE_AITBC_TOKEN_ADDRESS',
        'VITE_DISPUTE_RESOLUTION_ADDRESS', 'VITE_PERFORMANCE_VERIFIER_ADDRESS',
        'VITE_ESCROW_SERVICE_ADDRESS', 'COMPREHENSIVE', 'HIGH', 'MEDIUM', 'LOW',
        'RED', 'GREEN', 'YELLOW', 'BLUE', 'MAGENTA', 'CYAN', 'PURPLE', 'WHITE',
        'NC', 'EDITOR', 'PAGER', 'LANG', 'LC_ALL', 'TERM', 'SHELL', 'USER', 'HOME',
        'PATH', 'PWD', 'OLDPWD', 'SHLVL', '_', 'HOSTNAME', 'HOSTTYPE', 'OSTYPE',
        'MACHTYPE', 'UID', 'GID', 'EUID', 'EGID', 'PS1', 'PS2', 'IFS', 'DISPLAY',
        'XAUTHORITY', 'DBUS_SESSION_BUS_ADDRESS', 'SSH_AUTH_SOCK', 'SSH_CONNECTION',
        'SSH_CLIENT', 'SSH_TTY', 'LOGNAME', 'USERNAME', 'CURRENT_USER'
    }
|
||||
|
||||
def _find_python_files(self) -> List[Path]:
|
||||
"""Find all Python files in the project."""
|
||||
python_files = []
|
||||
for root, dirs, files in os.walk(self.project_root):
|
||||
# Skip hidden directories and common exclusions
|
||||
dirs[:] = [d for d in dirs if not d.startswith('.') and d not in {
|
||||
'__pycache__', 'node_modules', '.git', 'venv', 'env', '.venv'
|
||||
}]
|
||||
|
||||
for file in files:
|
||||
if file.endswith('.py'):
|
||||
python_files.append(Path(root) / file)
|
||||
|
||||
return python_files
|
||||
|
||||
def _parse_env_example(self) -> Set[str]:
|
||||
"""Parse .env.example and extract all environment variable keys."""
|
||||
env_vars = set()
|
||||
|
||||
if not self.env_example_path.exists():
|
||||
print(f"❌ .env.example not found at {self.env_example_path}")
|
||||
return env_vars
|
||||
|
||||
with open(self.env_example_path, 'r') as f:
|
||||
for line_num, line in enumerate(f, 1):
|
||||
line = line.strip()
|
||||
|
||||
# Skip comments and empty lines
|
||||
if not line or line.startswith('#'):
|
||||
continue
|
||||
|
||||
# Extract variable name (everything before =)
|
||||
if '=' in line:
|
||||
var_name = line.split('=')[0].strip()
|
||||
if var_name:
|
||||
env_vars.add(var_name)
|
||||
|
||||
return env_vars
|
||||
|
||||
def _find_env_usage_in_python(self) -> Set[str]:
|
||||
"""Find actual environment variable usage in Python files."""
|
||||
env_vars = set()
|
||||
|
||||
# More specific patterns for actual environment variables
|
||||
patterns = [
|
||||
r'os\.environ\.get\([\'"]([A-Z_][A-Z0-9_]*)[\'"]',
|
||||
r'os\.environ\[([\'"]([A-Z_][A-Z0-9_]*)[\'"])\]',
|
||||
r'os\.getenv\([\'"]([A-Z_][A-Z0-9_]*)[\'"]',
|
||||
r'getenv\([\'"]([A-Z_][A-Z0-9_]*)[\'"]',
|
||||
r'environ\.get\([\'"]([A-Z_][A-Z0-9_]*)[\'"]',
|
||||
r'environ\[([\'"]([A-Z_][A-Z0-9_]*)[\'"])\]',
|
||||
]
|
||||
|
||||
for python_file in self.python_files:
|
||||
try:
|
||||
with open(python_file, 'r', encoding='utf-8') as f:
|
||||
content = f.read()
|
||||
|
||||
for pattern in patterns:
|
||||
matches = re.finditer(pattern, content)
|
||||
for match in matches:
|
||||
var_name = match.group(1)
|
||||
# Only include if it looks like a real environment variable
|
||||
if var_name.isupper() and len(var_name) > 1:
|
||||
env_vars.add(var_name)
|
||||
|
||||
except (UnicodeDecodeError, PermissionError) as e:
|
||||
print(f"⚠️ Could not read {python_file}: {e}")
|
||||
|
||||
return env_vars
|
||||
|
||||
def _find_env_usage_in_config_files(self) -> Set[str]:
|
||||
"""Find environment variable usage in configuration files."""
|
||||
env_vars = set()
|
||||
|
||||
# Check common config files
|
||||
config_files = [
|
||||
'pyproject.toml',
|
||||
'pytest.ini',
|
||||
'setup.cfg',
|
||||
'tox.ini',
|
||||
'.github/workflows/*.yml',
|
||||
'.github/workflows/*.yaml',
|
||||
'docker-compose.yml',
|
||||
'docker-compose.yaml',
|
||||
'Dockerfile',
|
||||
]
|
||||
|
||||
for pattern in config_files:
|
||||
for config_file in self.project_root.glob(pattern):
|
||||
try:
|
||||
with open(config_file, 'r', encoding='utf-8') as f:
|
||||
content = f.read()
|
||||
|
||||
# Look for environment variable patterns in config files
|
||||
env_patterns = [
|
||||
r'\${([A-Z_][A-Z0-9_]*)}', # ${VAR_NAME}
|
||||
r'\$([A-Z_][A-Z0-9_]*)', # $VAR_NAME
|
||||
r'env\.([A-Z_][A-Z0-9_]*)', # env.VAR_NAME
|
||||
r'os\.environ\([\'"]([A-Z_][A-Z0-9_]*)[\'"]', # os.environ("VAR_NAME")
|
||||
r'getenv\([\'"]([A-Z_][A-Z0-9_]*)[\'"]', # getenv("VAR_NAME")
|
||||
]
|
||||
|
||||
for env_pattern in env_patterns:
|
||||
matches = re.finditer(env_pattern, content)
|
||||
for match in matches:
|
||||
var_name = match.group(1)
|
||||
if var_name.isupper() and len(var_name) > 1:
|
||||
env_vars.add(var_name)
|
||||
|
||||
except (UnicodeDecodeError, PermissionError) as e:
|
||||
print(f"⚠️ Could not read {config_file}: {e}")
|
||||
|
||||
return env_vars
|
||||
|
||||
def _find_env_usage_in_shell_scripts(self) -> Set[str]:
|
||||
"""Find environment variable usage in shell scripts."""
|
||||
env_vars = set()
|
||||
|
||||
shell_files = []
|
||||
for root, dirs, files in os.walk(self.project_root):
|
||||
dirs[:] = [d for d in dirs if not d.startswith('.') and d not in {
|
||||
'__pycache__', 'node_modules', '.git', 'venv', 'env', '.venv'
|
||||
}]
|
||||
|
||||
for file in files:
|
||||
if file.endswith(('.sh', '.bash', '.zsh')):
|
||||
shell_files.append(Path(root) / file)
|
||||
|
||||
for shell_file in shell_files:
|
||||
try:
|
||||
with open(shell_file, 'r', encoding='utf-8') as f:
|
||||
content = f.read()
|
||||
|
||||
# Look for environment variable patterns in shell scripts
|
||||
patterns = [
|
||||
r'\$\{([A-Z_][A-Z0-9_]*)\}', # ${VAR_NAME}
|
||||
r'\$([A-Z_][A-Z0-9_]*)', # $VAR_NAME
|
||||
r'export\s+([A-Z_][A-Z0-9_]*)=', # export VAR_NAME=
|
||||
r'([A-Z_][A-Z0-9_]*)=', # VAR_NAME=
|
||||
]
|
||||
|
||||
for pattern in patterns:
|
||||
matches = re.finditer(pattern, content)
|
||||
for match in matches:
|
||||
var_name = match.group(1)
|
||||
if var_name.isupper() and len(var_name) > 1:
|
||||
env_vars.add(var_name)
|
||||
|
||||
except (UnicodeDecodeError, PermissionError) as e:
|
||||
print(f"⚠️ Could not read {shell_file}: {e}")
|
||||
|
||||
return env_vars
|
||||
|
||||
def _find_all_env_usage(self) -> Set[str]:
|
||||
"""Find all environment variable usage across the project."""
|
||||
all_vars = set()
|
||||
|
||||
# Python files
|
||||
python_vars = self._find_env_usage_in_python()
|
||||
all_vars.update(python_vars)
|
||||
|
||||
# Config files
|
||||
config_vars = self._find_env_usage_in_config_files()
|
||||
all_vars.update(config_vars)
|
||||
|
||||
# Shell scripts
|
||||
shell_vars = self._find_env_usage_in_shell_scripts()
|
||||
all_vars.update(shell_vars)
|
||||
|
||||
# Filter out script variables and system variables
|
||||
filtered_vars = all_vars - self.script_vars
|
||||
|
||||
# Additional filtering for common non-config variables
|
||||
non_config_vars = {
|
||||
'HTTP_PROXY', 'HTTPS_PROXY', 'NO_PROXY', 'http_proxy', 'https_proxy',
|
||||
'PYTHONPATH', 'PYTHONHOME', 'VIRTUAL_ENV', 'CONDA_DEFAULT_ENV',
|
||||
'GITHUB_ACTIONS', 'CI', 'TRAVIS', 'APPVEYOR', 'CIRCLECI',
|
||||
'LD_LIBRARY_PATH', 'DYLD_LIBRARY_PATH', 'CLASSPATH',
|
||||
'JAVA_HOME', 'NODE_PATH', 'GOPATH', 'RUST_HOME',
|
||||
'XDG_CONFIG_HOME', 'XDG_DATA_HOME', 'XDG_CACHE_HOME',
|
||||
'TERM', 'COLUMNS', 'LINES', 'PS1', 'PS2', 'PROMPT_COMMAND'
|
||||
}
|
||||
|
||||
return filtered_vars - non_config_vars
|
||||
|
||||
def _check_missing_in_example(self, used_vars: Set[str], example_vars: Set[str]) -> Set[str]:
|
||||
"""Find variables used in code but missing from .env.example."""
|
||||
missing = used_vars - example_vars
|
||||
return missing
|
||||
|
||||
def _check_unused_in_example(self, used_vars: Set[str], example_vars: Set[str]) -> Set[str]:
|
||||
"""Find variables in .env.example but not used in code."""
|
||||
unused = example_vars - used_vars
|
||||
|
||||
# Filter out variables that might be used by external tools or services
|
||||
external_vars = {
|
||||
'NODE_ENV', 'NPM_CONFIG_PREFIX', 'NPM_AUTH_TOKEN',
|
||||
'DOCKER_HOST', 'DOCKER_TLS_VERIFY', 'DOCKER_CERT_PATH',
|
||||
'KUBERNETES_SERVICE_HOST', 'KUBERNETES_SERVICE_PORT',
|
||||
'REDIS_URL', 'MEMCACHED_URL', 'ELASTICSEARCH_URL',
|
||||
'SENTRY_DSN', 'ROLLBAR_ACCESS_TOKEN', 'HONEYBADGER_API_KEY'
|
||||
}
|
||||
|
||||
return unused - external_vars
|
||||
|
||||
def lint(self, verbose: bool = False) -> Tuple[int, int, int, Set[str], Set[str]]:
    """Run the linter and return results.

    Returns:
        (example count, used count, missing count,
         missing variable names, unused variable names)
    """
    def _dump(names: Set[str]) -> None:
        # Verbose helper: one sorted, comma-separated line.
        if names:
            print(f" {', '.join(sorted(names))}")

    print("🔍 Focused Dotenv Linter for AITBC")
    print("=" * 50)

    # What .env.example documents today.
    example_vars = self._parse_env_example()
    if verbose:
        print(f"📄 Found {len(example_vars)} variables in .env.example")
        _dump(example_vars)

    # What the codebase actually reads, after heuristic filtering.
    used_vars = self._find_all_env_usage()
    if verbose:
        print(f"🔍 Found {len(used_vars)} actual environment variables used in code")
        _dump(used_vars)

    # Drift in both directions.
    missing_vars = self._check_missing_in_example(used_vars, example_vars)
    unused_vars = self._check_unused_in_example(used_vars, example_vars)

    return len(example_vars), len(used_vars), len(missing_vars), missing_vars, unused_vars
|
||||
|
||||
def fix_env_example(self, missing_vars: Set[str], verbose: bool = False):
    """Add missing variables to .env.example.

    Appends a marked section with empty placeholder values; a no-op
    when there is nothing to add.
    """
    if not missing_vars:
        if verbose:
            print("✅ No missing variables to add")
        return

    print(f"🔧 Adding {len(missing_vars)} missing variables to .env.example")

    # Append in deterministic (sorted) order under a clear marker.
    placeholder_lines = [f"{var}=\n" for var in sorted(missing_vars)]
    with open(self.env_example_path, 'a') as f:
        f.write("\n# Auto-generated variables (added by focused_dotenv_linter)\n")
        f.writelines(placeholder_lines)

    print(f"✅ Added {len(missing_vars)} variables to .env.example")
|
||||
|
||||
def generate_report(self, example_count: int, used_count: int, missing_count: int,
                    missing_vars: Set[str], unused_vars: Set[str]) -> str:
    """Generate a detailed report.

    Returns a multi-line string: summary counts, then the missing and
    unused variable lists (when non-empty), or a success line.
    """
    lines = [
        "📊 Focused Dotenv Linter Report",
        "=" * 50,
        f"Variables in .env.example: {example_count}",
        f"Actual environment variables used: {used_count}",
        f"Missing from .env.example: {missing_count}",
        f"Unused in .env.example: {len(unused_vars)}",
        "",
    ]

    if missing_vars:
        lines.append("❌ Missing Variables (used in code but not in .env.example):")
        lines.extend(f" - {var}" for var in sorted(missing_vars))
        lines.append("")

    if unused_vars:
        lines.append("⚠️ Unused Variables (in .env.example but not used in code):")
        lines.extend(f" - {var}" for var in sorted(unused_vars))
        lines.append("")

    if not missing_vars and not unused_vars:
        lines.append("✅ No configuration drift detected!")

    return "\n".join(lines)
|
||||
|
||||
|
||||
def main():
    """Main entry point.

    Parses CLI flags, runs the focused linter, prints the drift
    report, optionally appends missing variables (--fix), and exits
    with status 1 when --check is set and drift was found.
    """
    parser = argparse.ArgumentParser(
        description="Focused Dotenv Linter for AITBC - Check for actual configuration drift",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
python scripts/focused_dotenv_linter.py # Check for drift
python scripts/focused_dotenv_linter.py --verbose # Verbose output
python scripts/focused_dotenv_linter.py --fix # Auto-fix missing variables
python scripts/focused_dotenv_linter.py --check # Exit with error code on issues
"""
    )

    parser.add_argument("--verbose", "-v", action="store_true", help="Verbose output")
    parser.add_argument("--fix", action="store_true", help="Auto-fix missing variables in .env.example")
    parser.add_argument("--check", action="store_true", help="Exit with error code if issues found")

    args = parser.parse_args()

    # Initialize linter (walks the project tree in its constructor).
    linter = FocusedDotenvLinter()

    # Run linting
    example_count, used_count, missing_count, missing_vars, unused_vars = linter.lint(args.verbose)

    # Generate report
    report = linter.generate_report(example_count, used_count, missing_count, missing_vars, unused_vars)
    print(report)

    # Auto-fix if requested. Note: the fix runs AFTER the report, so
    # the report reflects the pre-fix state of .env.example.
    if args.fix and missing_vars:
        linter.fix_env_example(missing_vars, args.verbose)

    # Exit with error code if check requested and issues found
    # (--check fails even when --fix just repaired the missing vars).
    if args.check and (missing_vars or unused_vars):
        print(f"❌ Configuration drift detected: {missing_count} missing, {len(unused_vars)} unused")
        sys.exit(1)

    # Success
    print("✅ Focused dotenv linter completed successfully")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
99
dev/scripts/development/generate_openapi.py
Executable file
99
dev/scripts/development/generate_openapi.py
Executable file
@@ -0,0 +1,99 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Generate OpenAPI specifications from FastAPI services
|
||||
"""
|
||||
|
||||
import json
|
||||
import sys
|
||||
import subprocess
|
||||
import requests
|
||||
from pathlib import Path
|
||||
|
||||
def extract_openapi_spec(service_name: str, base_url: str, output_file: str):
    """Extract OpenAPI spec from a running FastAPI service.

    Fetches ``{base_url}/openapi.json``, rewrites the title/version
    metadata and server list, and writes the spec to *output_file*
    (parent directories are created as needed).

    Returns:
        True on success, False on any failure (best-effort by design).
    """
    try:
        # Get OpenAPI spec from the service. A bounded timeout keeps a
        # hung service from stalling the whole generation run (the
        # original call could block indefinitely).
        response = requests.get(f"{base_url}/openapi.json", timeout=10)
        response.raise_for_status()

        spec = response.json()

        # Add service-specific metadata
        spec["info"]["title"] = f"AITBC {service_name} API"
        spec["info"]["description"] = f"OpenAPI specification for AITBC {service_name} service"
        spec["info"]["version"] = "1.0.0"

        # Add servers configuration
        spec["servers"] = [
            {
                "url": "https://aitbc.bubuit.net/api",
                "description": "Production server"
            },
            {
                "url": "https://staging-api.aitbc.io",
                "description": "Staging server"
            },
            {
                "url": "http://localhost:8011",
                "description": "Development server"
            }
        ]

        # Save the spec
        output_path = Path(output_file)
        output_path.parent.mkdir(parents=True, exist_ok=True)

        with open(output_path, 'w') as f:
            json.dump(spec, f, indent=2)

        print(f"✓ Generated {service_name} OpenAPI spec: {output_file}")
        return True

    except Exception as e:
        # Best-effort by design: report the failure and let the caller
        # aggregate results across services.
        print(f"✗ Failed to generate {service_name} spec: {e}")
        return False
|
||||
|
||||
def main():
    """Generate OpenAPI specs for all AITBC services.

    Iterates over the known service endpoints, extracts each spec, and
    exits with status 1 if any extraction fails.
    """
    services = [
        {
            "name": "Coordinator API",
            "base_url": "http://127.0.0.2:8011",
            "output": "api/coordinator/openapi.json"
        },
        {
            "name": "Blockchain Node API",
            "base_url": "http://127.0.0.2:8080",
            "output": "api/blockchain/openapi.json"
        },
        {
            "name": "Wallet Daemon API",
            "base_url": "http://127.0.0.2:8071",
            "output": "api/wallet/openapi.json"
        }
    ]

    print("Generating OpenAPI specifications...")

    # Collect one success flag per service, then report in aggregate.
    outcomes = [
        extract_openapi_spec(svc["name"], svc["base_url"], svc["output"])
        for svc in services
    ]

    if all(outcomes):
        print("\n✓ All OpenAPI specifications generated successfully!")
        print("\nNext steps:")
        print("1. Review the generated specs")
        print("2. Commit them to the documentation repository")
        print("3. Update the API reference documentation")
    else:
        print("\n✗ Some specifications failed to generate")
        sys.exit(1)
|
||||
|
||||
# Script entry point.
if __name__ == "__main__":
    main()
|
||||
135
dev/scripts/development/local-domain-proxy.py
Executable file
135
dev/scripts/development/local-domain-proxy.py
Executable file
@@ -0,0 +1,135 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Local proxy to simulate domain routing for development
|
||||
"""
|
||||
|
||||
import subprocess
|
||||
import time
|
||||
import os
|
||||
import signal
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# Configuration
# Public domain that this local proxy setup simulates.
DOMAIN = "aitbc.bubuit.net"
# Service registry: logical name -> local listen port and the URL path
# prefix it is expected to serve behind the domain proxy.
SERVICES = {
    "api": {"port": 8000, "path": "/v1"},
    "rpc": {"port": 9080, "path": "/rpc"},
    "marketplace": {"port": 3001, "path": "/"},
    "exchange": {"port": 3002, "path": "/"},
}
|
||||
|
||||
def start_services():
    """Start all AITBC services.

    Launches the coordinator API, blockchain node (only if nothing is
    already listening on :9080), marketplace UI and trade exchange as
    child processes, smoke-tests their endpoints, then blocks until
    Ctrl+C, at which point every started child is terminated and reaped.
    """
    print("🚀 Starting AITBC Services")
    print("=" * 40)

    # Change to project directory. AITBC_ROOT lets deployments override
    # the historical developer checkout path (e.g. /opt/aitbc in
    # production) without editing this script; the default preserves the
    # old behavior.
    os.chdir(os.environ.get("AITBC_ROOT", "/home/oib/windsurf/aitbc"))

    # Track started children by name so they can be stopped on Ctrl+C.
    processes = {}

    # Start Coordinator API
    print("\n1. Starting Coordinator API...")
    api_proc = subprocess.Popen([
        "python", "-m", "uvicorn",
        "src.app.main:app",
        "--host", "127.0.0.1",
        "--port", "8000"
    ], cwd="apps/coordinator-api")
    processes["api"] = api_proc
    print(f" PID: {api_proc.pid}")

    # Start Blockchain Node (if not running)
    print("\n2. Checking Blockchain Node...")
    # lsof prints nothing to stdout when no process listens on :9080.
    result = subprocess.run(["lsof", "-i", ":9080"], capture_output=True)
    if not result.stdout:
        print(" Starting Blockchain Node...")
        node_proc = subprocess.Popen([
            "python", "-m", "uvicorn",
            "aitbc_chain.app:app",
            "--host", "127.0.0.1",
            "--port", "9080"
        ], cwd="apps/blockchain-node")
        processes["blockchain"] = node_proc
        print(f" PID: {node_proc.pid}")
    else:
        print(" ✅ Already running")

    # Start Marketplace UI
    print("\n3. Starting Marketplace UI...")
    market_proc = subprocess.Popen([
        "python", "server.py",
        "--port", "3001"
    ], cwd="apps/marketplace-ui")
    processes["marketplace"] = market_proc
    print(f" PID: {market_proc.pid}")

    # Start Trade Exchange
    print("\n4. Starting Trade Exchange...")
    exchange_proc = subprocess.Popen([
        "python", "server.py",
        "--port", "3002"
    ], cwd="apps/trade-exchange")
    processes["exchange"] = exchange_proc
    print(f" PID: {exchange_proc.pid}")

    # Wait for services to start (fixed grace period, no readiness poll).
    print("\n⏳ Waiting for services to start...")
    time.sleep(5)

    # Test endpoints
    print("\n🧪 Testing Services:")
    test_endpoints()

    print("\n✅ All services started!")
    print("\n📋 Local URLs:")
    print(f" API: http://127.0.0.1:8000/v1")
    print(f" RPC: http://127.0.0.1:9080/rpc")
    print(f" Marketplace: http://127.0.0.1:3001")
    print(f" Exchange: http://127.0.0.1:3002")

    print("\n🌐 Domain URLs (when proxied):")
    print(f" API: https://{DOMAIN}/api")
    print(f" RPC: https://{DOMAIN}/rpc")
    print(f" Marketplace: https://{DOMAIN}/Marketplace")
    print(f" Exchange: https://{DOMAIN}/Exchange")
    print(f" Admin: https://{DOMAIN}/admin")

    print("\n🛑 Press Ctrl+C to stop all services")

    try:
        # Keep running until the operator interrupts.
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        print("\n\n🛑 Stopping services...")
        for name, proc in processes.items():
            print(f" Stopping {name}...")
            proc.terminate()
            proc.wait()
        print("✅ All services stopped!")
|
||||
|
||||
def test_endpoints():
    """Test if services are responding.

    Sends a short-timeout GET to each known local endpoint and prints a
    per-endpoint pass/fail line. Admin endpoints are authenticated with
    the ADMIN_API_KEY environment variable.
    """
    import requests

    endpoints = [
        ("API Health", "http://127.0.0.1:8000/v1/health"),
        ("Admin Stats", "http://127.0.0.1:8000/v1/admin/stats"),
        ("Marketplace", "http://127.0.0.1:3001"),
        ("Exchange", "http://127.0.0.1:3002"),
    ]

    for name, url in endpoints:
        try:
            if "admin" in url:
                # Fix: the previous code sent the literal string
                # "${ADMIN_API_KEY}" as the API key — shell-style
                # expansion never happens inside a Python string.
                # Read the real key from the environment instead.
                api_key = os.environ.get("ADMIN_API_KEY", "")
                response = requests.get(url, headers={"X-Api-Key": api_key}, timeout=2)
            else:
                response = requests.get(url, timeout=2)
            print(f" {name}: ✅ {response.status_code}")
        except Exception as e:
            # Any failure (refused connection, timeout) is reported,
            # truncated to keep the status table readable.
            print(f" {name}: ❌ {str(e)[:50]}")
|
||||
|
||||
# Script entry point.
if __name__ == "__main__":
    start_services()
|
||||
32
dev/scripts/development/parse_issues.py
Normal file
32
dev/scripts/development/parse_issues.py
Normal file
@@ -0,0 +1,32 @@
|
||||
# One-off maintenance script: prunes docs/10_plan/99_currentissue.md down
# to only the still-open items, rewriting the file IN PLACE (destructive —
# there is no backup of the original content).
import re

with open("docs/10_plan/99_currentissue.md", "r") as f:
    content = f.read()

# We know that Phase 8 is completely done and documented in docs/13_tasks/completed_phases/
# We should only keep the actual warnings and blockers that might still be relevant,
# and remove all the "Completed", "Results", "Achievements" sections.

# Let's extract only lines with warning/pending emojis
lines = content.split("\n")
# Accumulates the surviving lines; headers get a leading "\n" so later
# regex passes can detect header runs.
kept_lines = []

for line in lines:
    # Top-level title is always kept.
    if line.startswith("# Current Issues"):
        kept_lines.append(line)
    # "## Current ..." section headers are always kept.
    elif line.startswith("## Current"):
        kept_lines.append(line)
    # Keep lines flagged as warning/pending/in-progress, but drop any line
    # that also carries the "done" checkmark.
    elif any(icon in line for icon in ['⚠️', '⏳', '🔄']) and '✅' not in line:
        kept_lines.append(line)
    # Sub-headers are kept speculatively (with a blank line before them);
    # empty ones are stripped by the regex pass below.
    elif line.startswith("### "):
        kept_lines.append("\n" + line)
    elif line.startswith("#### "):
        kept_lines.append("\n" + line)

# Clean up empty headers
new_content = "\n".join(kept_lines)
# Pass 1: remove any header immediately followed (after blank lines) by
# another header — i.e. a header whose section lost all its content.
# NOTE(review): order matters — this must run before blank-line collapsing.
new_content = re.sub(r'#+\s+[^\n]+\n+(?=#)', '\n', new_content)
# Pass 2: collapse runs of 3+ newlines down to a single blank line.
new_content = re.sub(r'\n{3,}', '\n\n', new_content)

with open("docs/10_plan/99_currentissue.md", "w") as f:
    f.write(new_content.strip() + '\n')
|
||||
Reference in New Issue
Block a user