diff --git a/apps/blockchain-node/src/aitbc_chain/contracts/guardian_contract.py b/apps/blockchain-node/src/aitbc_chain/contracts/guardian_contract.py index 3640c97c..1bca606c 100755 --- a/apps/blockchain-node/src/aitbc_chain/contracts/guardian_contract.py +++ b/apps/blockchain-node/src/aitbc_chain/contracts/guardian_contract.py @@ -157,7 +157,7 @@ class GuardianContract: # Validate address try: to_address = to_checksum_address(to_address) - except: + except Exception: return { "status": "rejected", "reason": "Invalid recipient address", diff --git a/packages/py/aitbc-agent-sdk/aitbc_agent/agent.py b/packages/py/aitbc-agent-sdk/aitbc_agent/agent.py index bedfdc73..43085e74 100755 --- a/packages/py/aitbc-agent-sdk/aitbc_agent/agent.py +++ b/packages/py/aitbc-agent-sdk/aitbc_agent/agent.py @@ -4,6 +4,7 @@ Core Agent class for AITBC network participation import asyncio import json +import logging import uuid from datetime import datetime from typing import Dict, List, Optional, Any @@ -13,6 +14,8 @@ from cryptography.hazmat.primitives.asymmetric import rsa from cryptography.hazmat.primitives import serialization from cryptography.hazmat.primitives.asymmetric import padding +logger = logging.getLogger(__name__) + @dataclass class AgentCapabilities: """Agent capability specification""" @@ -73,7 +76,7 @@ class AgentIdentity: hashes.SHA256() ) return True - except: + except Exception: return False class Agent: @@ -152,11 +155,11 @@ class Agent: await asyncio.sleep(1) # Simulate network call self.registered = True - print(f"Agent {self.identity.id} registered successfully") + logger.info(f"Agent {self.identity.id} registered successfully") return True except Exception as e: - print(f"Registration failed: {e}") + logger.error(f"Registration failed: {e}") return False async def get_reputation(self) -> Dict[str, float]: @@ -172,7 +175,7 @@ class Agent: async def update_reputation(self, new_score: float) -> None: """Update agent reputation score""" self.reputation_score = 
new_score - print(f"Reputation updated to {new_score}") + logger.info(f"Reputation updated to {new_score}") async def get_earnings(self, period: str = "30d") -> Dict[str, Any]: """Get agent earnings information""" @@ -199,7 +202,7 @@ class Agent: message["signature"] = signature # TODO: Send through AITBC agent messaging protocol - print(f"Message sent to {recipient_id}: {message_type}") + logger.info(f"Message sent to {recipient_id}: {message_type}") return True async def receive_message(self, message: Dict[str, Any]) -> bool: @@ -210,7 +213,7 @@ class Agent: # TODO: Verify sender's signature # For now, just process the message - print(f"Received message from {message.get('from')}: {message.get('type')}") + logger.info(f"Received message from {message.get('from')}: {message.get('type')}") return True def to_dict(self) -> Dict[str, Any]: diff --git a/packages/py/aitbc-agent-sdk/aitbc_agent/compute_provider.py b/packages/py/aitbc-agent-sdk/aitbc_agent/compute_provider.py index 4b058849..3f0e2021 100755 --- a/packages/py/aitbc-agent-sdk/aitbc_agent/compute_provider.py +++ b/packages/py/aitbc-agent-sdk/aitbc_agent/compute_provider.py @@ -3,11 +3,14 @@ Compute Provider Agent - for agents that provide computational resources """ import asyncio +import logging from typing import Dict, List, Optional, Any from datetime import datetime, timedelta from dataclasses import dataclass from .agent import Agent, AgentCapabilities +logger = logging.getLogger(__name__) + @dataclass class ResourceOffer: """Resource offering specification""" @@ -66,11 +69,11 @@ class ComputeProvider(Agent): await self._submit_to_marketplace(offer) self.current_offers.append(offer) - print(f"Resource offer submitted: {price_per_hour} AITBC/hour") + logger.info(f"Resource offer submitted: {price_per_hour} AITBC/hour") return True except Exception as e: - print(f"Failed to offer resources: {e}") + logger.error(f"Failed to offer resources: {e}") return False async def set_availability(self, schedule: 
Dict[str, Any]) -> bool: @@ -81,11 +84,11 @@ class ComputeProvider(Agent): offer.availability_schedule = schedule await self._update_marketplace_offer(offer) - print("Availability schedule updated") + logger.info("Availability schedule updated") return True except Exception as e: - print(f"Failed to update availability: {e}") + logger.error(f"Failed to update availability: {e}") return False async def enable_dynamic_pricing(self, base_rate: float, demand_threshold: float = 0.8, max_multiplier: float = 2.0, adjustment_frequency: str = "15min") -> bool: @@ -102,11 +105,11 @@ class ComputeProvider(Agent): # Start dynamic pricing task asyncio.create_task(self._dynamic_pricing_loop()) - print("Dynamic pricing enabled") + logger.info("Dynamic pricing enabled") return True except Exception as e: - print(f"Failed to enable dynamic pricing: {e}") + logger.error(f"Failed to enable dynamic pricing: {e}") return False async def _dynamic_pricing_loop(self): @@ -134,10 +137,10 @@ class ComputeProvider(Agent): offer.price_per_hour = new_price await self._update_marketplace_offer(offer) - print(f"Dynamic pricing: utilization={current_utilization:.2f}, price={new_price:.3f} AITBC/h") + logger.debug(f"Dynamic pricing: utilization={current_utilization:.2f}, price={new_price:.3f} AITBC/h") except Exception as e: - print(f"Dynamic pricing error: {e}") + logger.error(f"Dynamic pricing error: {e}") # Wait for next adjustment await asyncio.sleep(900) # 15 minutes @@ -163,11 +166,11 @@ class ComputeProvider(Agent): # Execute job (simulate) asyncio.create_task(self._execute_job(job, job_request)) - print(f"Job accepted: {job.job_id} from {job.consumer_id}") + logger.info(f"Job accepted: {job.job_id} from {job.consumer_id}") return True except Exception as e: - print(f"Failed to accept job: {e}") + logger.error(f"Failed to accept job: {e}") return False async def _execute_job(self, job: JobExecution, job_request: Dict[str, Any]): @@ -193,11 +196,11 @@ class ComputeProvider(Agent): # Notify 
consumer await self._notify_job_completion(job, earnings) - print(f"Job completed: {job.job_id}, earned {earnings} AITBC") + logger.info(f"Job completed: {job.job_id}, earned {earnings} AITBC") except Exception as e: job.status = "failed" - print(f"Job execution failed: {job.job_id} - {e}") + logger.error(f"Job execution failed: {job.job_id} - {e}") async def _notify_job_completion(self, job: JobExecution, earnings: float): """Notify consumer about job completion""" diff --git a/packages/py/aitbc-agent-sdk/aitbc_agent/swarm_coordinator.py b/packages/py/aitbc-agent-sdk/aitbc_agent/swarm_coordinator.py index b2425a20..4b01a873 100755 --- a/packages/py/aitbc-agent-sdk/aitbc_agent/swarm_coordinator.py +++ b/packages/py/aitbc-agent-sdk/aitbc_agent/swarm_coordinator.py @@ -4,11 +4,14 @@ Swarm Coordinator - for agents participating in collective intelligence import asyncio import json +import logging from typing import Dict, List, Optional, Any from datetime import datetime from dataclasses import dataclass from .agent import Agent +logger = logging.getLogger(__name__) + @dataclass class SwarmMessage: """Swarm communication message""" @@ -81,11 +84,11 @@ class SwarmCoordinator(Agent): # Start swarm participation tasks asyncio.create_task(self._swarm_participation_loop(swarm_id)) - print(f"Joined swarm: {swarm_id} as {config.get('role', 'participant')}") + logger.info(f"Joined swarm: {swarm_id} as {config.get('role', 'participant')}") return True except Exception as e: - print(f"Failed to join swarm {swarm_type}: {e}") + logger.error(f"Failed to join swarm {swarm_type}: {e}") return False async def _swarm_participation_loop(self, swarm_id: str): @@ -107,7 +110,7 @@ class SwarmCoordinator(Agent): swarm_config["last_activity"] = datetime.utcnow().isoformat() except Exception as e: - print(f"Swarm participation error for {swarm_id}: {e}") + logger.error(f"Swarm participation error for {swarm_id}: {e}") # Wait before next participation cycle await asyncio.sleep(60) # 1 minute 
@@ -135,11 +138,11 @@ class SwarmCoordinator(Agent): # Update contribution count self.joined_swarms[message.swarm_id]["contribution_count"] += 1 - print(f"Broadcasted to swarm {message.swarm_id}: {message.message_type}") + logger.info(f"Broadcasted to swarm {message.swarm_id}: {message.message_type}") return True except Exception as e: - print(f"Failed to broadcast to swarm: {e}") + logger.error(f"Failed to broadcast to swarm: {e}") return False async def _contribute_swarm_data(self, swarm_id: str): @@ -169,7 +172,7 @@ class SwarmCoordinator(Agent): await self.broadcast_to_swarm(message) except Exception as e: - print(f"Failed to contribute swarm data: {e}") + logger.error(f"Failed to contribute swarm data: {e}") async def _get_load_balancing_data(self) -> Dict[str, Any]: """Get load balancing data for swarm contribution""" @@ -237,11 +240,11 @@ class SwarmCoordinator(Agent): # Submit to swarm for coordination coordination_result = await self._submit_coordination_proposal(proposal) - print(f"Task coordination initiated: {task} with {collaborators} collaborators") + logger.info(f"Task coordination initiated: {task} with {collaborators} collaborators") return coordination_result except Exception as e: - print(f"Failed to coordinate task: {e}") + logger.error(f"Failed to coordinate task: {e}") return {"success": False, "error": str(e)} async def get_market_intelligence(self) -> Dict[str, Any]: @@ -275,7 +278,7 @@ class SwarmCoordinator(Agent): return {"error": "Not joined to pricing swarm"} except Exception as e: - print(f"Failed to get market intelligence: {e}") + logger.error(f"Failed to get market intelligence: {e}") return {"error": str(e)} async def analyze_swarm_benefits(self) -> Dict[str, Any]: @@ -302,7 +305,7 @@ class SwarmCoordinator(Agent): } except Exception as e: - print(f"Failed to analyze swarm benefits: {e}") + logger.error(f"Failed to analyze swarm benefits: {e}") return {"error": str(e)} async def _register_with_swarm(self, swarm_id: str, 
#!/usr/bin/env python3
"""
QA Cycle: Run tests, exercise scenarios, find bugs, perform code reviews.
Runs periodically to ensure repository health and discover regressions.
"""
import os
import subprocess
import json
import sys
import shutil
import time
import random
from datetime import datetime
from pathlib import Path

REPO_DIR = '/opt/aitbc'
LOG_FILE = '/opt/aitbc/qa-cycle.log'
TOKEN_FILE = '/opt/aitbc/.gitea_token.sh'

# Maximum startup jitter (seconds) so parallel agents don't all hit the
# forge at the same instant. Applied inside main(), NOT at import time,
# so merely importing this module never blocks for up to 15 minutes.
MAX_JITTER_SECONDS = 900


def get_token():
    """Return the Gitea API token.

    Parses TOKEN_FILE (a shell snippet containing a ``GITEA_TOKEN=...``
    line, value optionally quoted) rather than sourcing it, then falls
    back to the GITEA_TOKEN environment variable. Returns '' if neither
    source provides a token.
    """
    if os.path.exists(TOKEN_FILE):
        with open(TOKEN_FILE) as f:
            for line in f:
                if line.strip().startswith('GITEA_TOKEN='):
                    # Strip optional surrounding quotes so an export-style
                    # file (GITEA_TOKEN='x' or "x") still yields a usable token.
                    return line.strip().split('=', 1)[1].strip().strip('\'"')
    return os.getenv('GITEA_TOKEN', '')


GITEA_TOKEN = get_token()
API_BASE = os.getenv('GITEA_API_BASE', 'http://gitea.bubuit.net:3000/api/v1')
REPO = 'oib/aitbc'

# Agent identities. Bound at module level (and refreshed from the
# environment in main()) so review_my_open_prs() never raises NameError
# when called outside main() — previously these were only assigned via a
# `global` statement inside main().
MY_AGENT = os.getenv('AGENT_NAME', 'aitbc1')
SIBLING_AGENT = 'aitbc' if MY_AGENT == 'aitbc1' else 'aitbc1'


def log(msg):
    """Append a UTC-timestamped line to LOG_FILE and echo it to stdout."""
    now = datetime.utcnow().isoformat() + 'Z'
    with open(LOG_FILE, 'a') as f:
        f.write(f"[{now}] {msg}\n")
    print(msg)


def run_cmd(cmd, cwd=REPO_DIR, timeout=300):
    """Run a shell command and return (returncode, stdout, stderr).

    Returns rc -1 with stderr "timeout" on timeout, rc -2 with the
    exception text on any other failure (e.g. missing cwd). NOTE: uses
    shell=True; only ever pass fixed, trusted command strings.
    """
    try:
        result = subprocess.run(cmd, shell=True, cwd=cwd,
                                capture_output=True, text=True,
                                timeout=timeout)
        return result.returncode, result.stdout, result.stderr
    except subprocess.TimeoutExpired:
        return -1, "", "timeout"
    except Exception as e:
        return -2, "", str(e)


def fetch_latest_main():
    """Fast-forward the working tree to origin/main (discarding local state).

    Returns True on success, False (after logging) on any git failure.
    """
    log("Fetching latest main...")
    rc, out, err = run_cmd("git fetch origin main")
    if rc != 0:
        log(f"Fetch failed: {err}")
        return False
    rc, out, err = run_cmd("git checkout main")
    if rc != 0:
        log(f"Checkout main failed: {err}")
        return False
    rc, out, err = run_cmd("git reset --hard origin/main")
    if rc != 0:
        log(f"Reset to origin/main failed: {err}")
        return False
    log("Main updated to latest.")
    return True


def run_tests():
    """Run pytest for each known package; return [(pkg, passed_bool), ...].

    Packages whose tests/ directory is missing are skipped silently.
    """
    log("Running test suites...")
    results = []
    for pkg in ['aitbc-core', 'aitbc-sdk', 'aitbc-crypto']:
        testdir = f"packages/py/{pkg}/tests"
        if not os.path.exists(os.path.join(REPO_DIR, testdir)):
            continue
        log(f"Testing {pkg}...")
        rc, out, err = run_cmd(f"python3 -m pytest {testdir} -q", timeout=120)
        if rc == 0:
            log(f"✅ {pkg} tests passed.")
        else:
            log(f"❌ {pkg} tests failed (rc={rc}). Output: {out}\nError: {err}")
        results.append((pkg, rc == 0))
    return results


def run_lint():
    """Run flake8 (critical-error selection only) if it is installed."""
    log("Running linters (flake8 if available)...")
    if shutil.which('flake8'):
        # E9/F63/F7/F82 are the "definitely broken" classes: syntax errors,
        # comparison bugs, and undefined names.
        rc, out, err = run_cmd(
            "flake8 packages/py/ --count --select=E9,F63,F7,F82 "
            "--show-source --statistics", timeout=60)
        if rc == 0:
            log("✅ No critical lint errors.")
        else:
            log(f"❌ Lint errors: {out}")
    else:
        log("flake8 not installed; skipping lint.")


def query_api(path, method='GET', data=None):
    """Call the Gitea REST API; return parsed JSON or None on any error.

    `data` (a dict) is JSON-encoded for POST/PUT-style requests.
    """
    import urllib.request
    import urllib.error
    url = f"{API_BASE}/{path}"
    headers = {'Authorization': f'token {GITEA_TOKEN}'}
    if data:
        headers['Content-Type'] = 'application/json'
        data = json.dumps(data).encode()
    req = urllib.request.Request(url, method=method, headers=headers, data=data)
    try:
        with urllib.request.urlopen(req, timeout=30) as resp:
            return json.load(resp)
    except Exception as e:
        log(f"API error {path}: {e}")
        return None


def review_my_open_prs():
    """Ensure every open PR authored by this agent has the sibling agent
    requested as a reviewer, requesting the review where it is missing.
    """
    log("Checking my open PRs for missing reviews...")
    # NOTE(review): `author=` as a pulls list filter — confirm the Gitea
    # version in use supports this query parameter.
    my_prs = query_api(f'repos/{REPO}/pulls?state=open&author={MY_AGENT}') or []
    for pr in my_prs:
        num = pr['number']
        title = pr['title']
        requested = pr.get('requested_reviewers', [])
        if not any(r.get('login') == SIBLING_AGENT for r in requested):
            log(f"PR #{num} '{title}' missing sibling review. Requesting...")
            query_api(f'repos/{REPO}/pulls/{num}/requested_reviewers',
                      method='POST', data={'reviewers': [SIBLING_AGENT]})
        else:
            log(f"PR #{num} already has sibling review requested.")


def synthesize_status():
    """Log a summary of open issues/PRs and flag PRs with failing CI checks."""
    log("Collecting repository status...")
    issues = query_api(f'repos/{REPO}/issues?state=open') or []
    prs = query_api(f'repos/{REPO}/pulls?state=open') or []
    log(f"Open issues: {len(issues)}, open PRs: {len(prs)}")
    # Gitea's issues endpoint can include PRs; the 'pull_request' key marks them.
    unassigned_issues = [i for i in issues
                         if not i.get('assignees') and 'pull_request' not in i]
    log(f"Unassigned issues: {len(unassigned_issues)}")
    if unassigned_issues:
        for i in unassigned_issues[:3]:
            log(f" - #{i['number']} {i['title'][:50]}")
    # Check CI for open PRs.
    for pr in prs:
        num = pr['number']
        statuses = query_api(
            f'repos/{REPO}/commits/{pr["head"]["sha"]}/statuses') or []
        failing = [s for s in statuses
                   if s.get('status') not in ('success', 'pending')]
        if failing:
            log(f"PR #{num} has failing checks: "
                f"{', '.join(s.get('context', '?') for s in failing)}")


def main():
    """Run one full QA cycle: jitter, sync main, test, lint, review, report."""
    # Jitter: random delay up to MAX_JITTER_SECONDS so concurrent agents
    # stagger their load on the forge. Previously this ran at import time,
    # which blocked every importer of this module; it belongs here.
    time.sleep(random.randint(0, MAX_JITTER_SECONDS))
    now = datetime.utcnow().isoformat() + 'Z'
    log(f"\n=== QA Cycle start: {now} ===")
    if not GITEA_TOKEN:
        log("GITEA_TOKEN not set; aborting.")
        sys.exit(1)
    global MY_AGENT, SIBLING_AGENT
    MY_AGENT = os.getenv('AGENT_NAME', 'aitbc1')
    SIBLING_AGENT = 'aitbc' if MY_AGENT == 'aitbc1' else 'aitbc1'
    if not fetch_latest_main():
        log("Aborting due to fetch failure.")
        return
    run_tests()
    run_lint()
    review_my_open_prs()
    synthesize_status()
    log("=== QA Cycle complete ===")


if __name__ == '__main__':
    main()