Add sys import to test files and remove obsolete integration tests
Some checks failed
API Endpoint Tests / test-api-endpoints (push) Successful in 9s
Blockchain Synchronization Verification / sync-verification (push) Failing after 1s
CLI Tests / test-cli (push) Failing after 3s
Documentation Validation / validate-docs (push) Successful in 6s
Documentation Validation / validate-policies-strict (push) Successful in 2s
Integration Tests / test-service-integration (push) Successful in 40s
Multi-Node Blockchain Health Monitoring / health-check (push) Successful in 1s
P2P Network Verification / p2p-verification (push) Successful in 2s
Production Tests / Production Integration Tests (push) Successful in 21s
Python Tests / test-python (push) Successful in 13s
Security Scanning / security-scan (push) Failing after 46s
Smart Contract Tests / test-solidity (map[name:aitbc-token path:packages/solidity/aitbc-token]) (push) Successful in 17s
Smart Contract Tests / lint-solidity (push) Successful in 10s
Some checks failed
API Endpoint Tests / test-api-endpoints (push) Successful in 9s
Blockchain Synchronization Verification / sync-verification (push) Failing after 1s
CLI Tests / test-cli (push) Failing after 3s
Documentation Validation / validate-docs (push) Successful in 6s
Documentation Validation / validate-policies-strict (push) Successful in 2s
Integration Tests / test-service-integration (push) Successful in 40s
Multi-Node Blockchain Health Monitoring / health-check (push) Successful in 1s
P2P Network Verification / p2p-verification (push) Successful in 2s
Production Tests / Production Integration Tests (push) Successful in 21s
Python Tests / test-python (push) Successful in 13s
Security Scanning / security-scan (push) Failing after 46s
Smart Contract Tests / test-solidity (map[name:aitbc-token path:packages/solidity/aitbc-token]) (push) Successful in 17s
Smart Contract Tests / lint-solidity (push) Successful in 10s
- Add sys import to 29 test files across agent-coordinator, blockchain-event-bridge, blockchain-node, and coordinator-api - Remove apps/blockchain-event-bridge/tests/test_integration.py (obsolete bridge integration tests) - Remove apps/coordinator-api/tests/test_integration.py (obsolete API integration tests) - Implement GPU registration in marketplace_gpu.py with GPURegistry model persistence
This commit is contained in:
1
apps/plugin-analytics/tests/__init__.py
Normal file
1
apps/plugin-analytics/tests/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Plugin analytics service tests"""
|
||||
168
apps/plugin-analytics/tests/test_edge_cases_plugin_analytics.py
Normal file
168
apps/plugin-analytics/tests/test_edge_cases_plugin_analytics.py
Normal file
@@ -0,0 +1,168 @@
|
||||
"""Edge case and error handling tests for plugin analytics service"""
|
||||
|
||||
import pytest
|
||||
import sys
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from fastapi.testclient import TestClient
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
from main import app, PluginUsage, PluginPerformance, PluginRating, PluginEvent, plugin_usage_data, plugin_performance_data, plugin_ratings, plugin_events
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def reset_state():
    """Empty every in-memory analytics store before and after each test."""
    stores = (
        plugin_usage_data,
        plugin_performance_data,
        plugin_ratings,
        plugin_events,
    )
    for store in stores:
        store.clear()
    yield
    for store in stores:
        store.clear()
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_plugin_usage_empty_plugin_id():
    """An empty plugin_id is accepted as-is by the PluginUsage model."""
    record = PluginUsage(
        plugin_id="",
        user_id="user_123",
        action="install",
        timestamp=datetime.utcnow(),
    )
    assert record.plugin_id == ""


@pytest.mark.unit
def test_plugin_performance_negative_values():
    """Negative metric values are stored unchanged by PluginPerformance."""
    metrics = PluginPerformance(
        plugin_id="plugin_123",
        version="1.0.0",
        cpu_usage=-10.0,
        memory_usage=-5.0,
        response_time=-0.1,
        error_rate=-0.01,
        uptime=-50.0,
        timestamp=datetime.utcnow(),
    )
    assert metrics.cpu_usage == -10.0
    assert metrics.memory_usage == -5.0


@pytest.mark.unit
def test_plugin_rating_out_of_range():
    """A rating above the usual 1-5 range is stored unchanged."""
    review = PluginRating(
        plugin_id="plugin_123",
        user_id="user_123",
        rating=10,
        timestamp=datetime.utcnow(),
    )
    assert review.rating == 10


@pytest.mark.unit
def test_plugin_rating_zero():
    """A zero rating is stored unchanged."""
    review = PluginRating(
        plugin_id="plugin_123",
        user_id="user_123",
        rating=0,
        timestamp=datetime.utcnow(),
    )
    assert review.rating == 0
|
||||
|
||||
|
||||
@pytest.mark.integration
def test_get_plugin_usage_no_data():
    """Usage endpoint answers 200 with zero records for an unknown plugin."""
    reply = TestClient(app).get("/api/v1/analytics/usage/nonexistent")
    assert reply.status_code == 200
    body = reply.json()
    assert body["total_records"] == 0


@pytest.mark.integration
def test_get_plugin_performance_no_data():
    """Performance endpoint answers 200 with zero records when empty."""
    reply = TestClient(app).get("/api/v1/analytics/performance/nonexistent")
    assert reply.status_code == 200
    body = reply.json()
    assert body["total_records"] == 0


@pytest.mark.integration
def test_get_plugin_ratings_no_data():
    """Ratings endpoint answers 200 with zero ratings when empty."""
    reply = TestClient(app).get("/api/v1/analytics/ratings/nonexistent")
    assert reply.status_code == 200
    body = reply.json()
    assert body["total_ratings"] == 0


@pytest.mark.integration
def test_dashboard_with_no_data():
    """Dashboard renders with an all-zero overview when nothing is recorded."""
    reply = TestClient(app).get("/api/v1/analytics/dashboard")
    assert reply.status_code == 200
    body = reply.json()
    assert body["dashboard"]["overview"]["total_plugins"] == 0


@pytest.mark.integration
def test_record_multiple_usage_events():
    """Five usage events for one plugin are all recorded and retrievable."""
    client = TestClient(app)

    for i in range(5):
        event = PluginUsage(
            plugin_id="plugin_123",
            user_id=f"user_{i}",
            action="use",
            timestamp=datetime.utcnow(),
        )
        client.post("/api/v1/analytics/usage", json=event.model_dump(mode='json'))

    reply = client.get("/api/v1/analytics/usage/plugin_123")
    assert reply.status_code == 200
    body = reply.json()
    assert body["total_records"] == 5


@pytest.mark.integration
def test_usage_trends_days_parameter():
    """Trends endpoint accepts a custom `days` query parameter."""
    reply = TestClient(app).get("/api/v1/analytics/trends?days=7")
    assert reply.status_code == 200
    body = reply.json()
    assert "trends" in body


@pytest.mark.integration
def test_get_plugin_usage_days_parameter():
    """Usage endpoint echoes the requested `days` window."""
    reply = TestClient(app).get("/api/v1/analytics/usage/plugin_123?days=7")
    assert reply.status_code == 200
    body = reply.json()
    assert body["period_days"] == 7


@pytest.mark.integration
def test_get_plugin_performance_hours_parameter():
    """Performance endpoint echoes the requested `hours` window."""
    reply = TestClient(app).get("/api/v1/analytics/performance/plugin_123?hours=12")
    assert reply.status_code == 200
    body = reply.json()
    assert body["period_hours"] == 12
|
||||
253
apps/plugin-analytics/tests/test_integration_plugin_analytics.py
Normal file
253
apps/plugin-analytics/tests/test_integration_plugin_analytics.py
Normal file
@@ -0,0 +1,253 @@
|
||||
"""Integration tests for plugin analytics service"""
|
||||
|
||||
import pytest
|
||||
import sys
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from fastapi.testclient import TestClient
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
from main import app, PluginUsage, PluginPerformance, PluginRating, PluginEvent, plugin_usage_data, plugin_performance_data, plugin_ratings, plugin_events
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def reset_state():
    """Empty every in-memory analytics store before and after each test."""
    stores = (
        plugin_usage_data,
        plugin_performance_data,
        plugin_ratings,
        plugin_events,
    )
    for store in stores:
        store.clear()
    yield
    for store in stores:
        store.clear()
|
||||
|
||||
|
||||
@pytest.mark.integration
def test_root_endpoint():
    """The service root reports its identity and running status."""
    reply = TestClient(app).get("/")
    assert reply.status_code == 200
    body = reply.json()
    assert body["service"] == "AITBC Plugin Analytics Service"
    assert body["status"] == "running"


@pytest.mark.integration
def test_health_check_endpoint():
    """Health check reports healthy plus record counters."""
    reply = TestClient(app).get("/health")
    assert reply.status_code == 200
    body = reply.json()
    assert body["status"] == "healthy"
    assert "total_usage_records" in body
    assert "total_performance_records" in body


@pytest.mark.integration
def test_record_plugin_usage():
    """Posting a usage record yields an id and 'recorded' status."""
    payload = PluginUsage(
        plugin_id="plugin_123",
        user_id="user_123",
        action="install",
        timestamp=datetime.utcnow(),
    ).model_dump(mode='json')
    reply = TestClient(app).post("/api/v1/analytics/usage", json=payload)
    assert reply.status_code == 200
    body = reply.json()
    assert body["usage_id"]
    assert body["status"] == "recorded"


@pytest.mark.integration
def test_record_plugin_performance():
    """Posting a performance sample yields an id and 'recorded' status."""
    payload = PluginPerformance(
        plugin_id="plugin_123",
        version="1.0.0",
        cpu_usage=50.5,
        memory_usage=30.2,
        response_time=0.123,
        error_rate=0.001,
        uptime=99.9,
        timestamp=datetime.utcnow(),
    ).model_dump(mode='json')
    reply = TestClient(app).post("/api/v1/analytics/performance", json=payload)
    assert reply.status_code == 200
    body = reply.json()
    assert body["performance_id"]
    assert body["status"] == "recorded"


@pytest.mark.integration
def test_record_plugin_rating():
    """Posting a rating with a review yields an id and 'recorded' status."""
    payload = PluginRating(
        plugin_id="plugin_123",
        user_id="user_123",
        rating=5,
        review="Great plugin!",
        timestamp=datetime.utcnow(),
    ).model_dump(mode='json')
    reply = TestClient(app).post("/api/v1/analytics/rating", json=payload)
    assert reply.status_code == 200
    body = reply.json()
    assert body["rating_id"]
    assert body["status"] == "recorded"


@pytest.mark.integration
def test_record_plugin_event():
    """Posting an event yields an id and 'recorded' status."""
    payload = PluginEvent(
        event_type="error",
        plugin_id="plugin_123",
        user_id="user_123",
        data={"error": "timeout"},
        timestamp=datetime.utcnow(),
    ).model_dump(mode='json')
    reply = TestClient(app).post("/api/v1/analytics/event", json=payload)
    assert reply.status_code == 200
    body = reply.json()
    assert body["event_id"]
    assert body["status"] == "recorded"
|
||||
|
||||
|
||||
@pytest.mark.integration
def test_get_plugin_usage():
    """Recorded usage is returned by the per-plugin usage endpoint."""
    client = TestClient(app)
    # Record usage first so the query below has data to aggregate.
    usage = PluginUsage(
        plugin_id="plugin_123",
        user_id="user_123",
        action="install",
        timestamp=datetime.utcnow()
    )
    client.post("/api/v1/analytics/usage", json=usage.model_dump(mode='json'))

    response = client.get("/api/v1/analytics/usage/plugin_123")
    assert response.status_code == 200
    data = response.json()
    assert data["plugin_id"] == "plugin_123"
    assert "usage_statistics" in data


@pytest.mark.integration
def test_get_plugin_performance():
    """Recorded performance samples are returned by the per-plugin endpoint."""
    client = TestClient(app)
    # Record performance first so the query below has data to aggregate.
    perf = PluginPerformance(
        plugin_id="plugin_123",
        version="1.0.0",
        cpu_usage=50.5,
        memory_usage=30.2,
        response_time=0.123,
        error_rate=0.001,
        uptime=99.9,
        timestamp=datetime.utcnow()
    )
    client.post("/api/v1/analytics/performance", json=perf.model_dump(mode='json'))

    response = client.get("/api/v1/analytics/performance/plugin_123")
    assert response.status_code == 200
    data = response.json()
    assert data["plugin_id"] == "plugin_123"
    assert "performance_statistics" in data


@pytest.mark.integration
def test_get_plugin_ratings():
    """Recorded ratings are returned by the per-plugin ratings endpoint."""
    client = TestClient(app)
    # Record a rating first so the query below has data to aggregate.
    rating = PluginRating(
        plugin_id="plugin_123",
        user_id="user_123",
        rating=5,
        timestamp=datetime.utcnow()
    )
    client.post("/api/v1/analytics/rating", json=rating.model_dump(mode='json'))

    response = client.get("/api/v1/analytics/ratings/plugin_123")
    assert response.status_code == 200
    data = response.json()
    assert data["plugin_id"] == "plugin_123"
    assert "rating_statistics" in data


@pytest.mark.integration
def test_get_analytics_dashboard():
    """Dashboard endpoint exposes an overview and trending-plugins section."""
    client = TestClient(app)
    response = client.get("/api/v1/analytics/dashboard")
    assert response.status_code == 200
    data = response.json()
    assert "dashboard" in data
    assert "overview" in data["dashboard"]
    assert "trending_plugins" in data["dashboard"]


@pytest.mark.integration
def test_get_usage_trends():
    """Trends endpoint answers with a `trends` payload."""
    client = TestClient(app)
    response = client.get("/api/v1/analytics/trends")
    assert response.status_code == 200
    data = response.json()
    assert "trends" in data


@pytest.mark.integration
def test_get_usage_trends_plugin_specific():
    """Trends endpoint echoes the plugin_id filter when one is given."""
    client = TestClient(app)
    response = client.get("/api/v1/analytics/trends?plugin_id=plugin_123")
    assert response.status_code == 200
    data = response.json()
    assert "plugin_id" in data


@pytest.mark.integration
def test_generate_analytics_report_usage():
    """Usage report generation succeeds."""
    # Fixed: the original bound `data = response.json()` and never used it.
    client = TestClient(app)
    response = client.get("/api/v1/analytics/reports?report_type=usage")
    assert response.status_code == 200


@pytest.mark.integration
def test_generate_analytics_report_performance():
    """Performance report generation succeeds."""
    client = TestClient(app)
    response = client.get("/api/v1/analytics/reports?report_type=performance")
    assert response.status_code == 200


@pytest.mark.integration
def test_generate_analytics_report_ratings():
    """Ratings report generation succeeds."""
    client = TestClient(app)
    response = client.get("/api/v1/analytics/reports?report_type=ratings")
    assert response.status_code == 200


@pytest.mark.integration
def test_generate_analytics_report_invalid():
    """An unknown report_type is rejected with HTTP 400."""
    client = TestClient(app)
    response = client.get("/api/v1/analytics/reports?report_type=invalid")
    assert response.status_code == 400
|
||||
123
apps/plugin-analytics/tests/test_unit_plugin_analytics.py
Normal file
123
apps/plugin-analytics/tests/test_unit_plugin_analytics.py
Normal file
@@ -0,0 +1,123 @@
|
||||
"""Unit tests for plugin analytics service"""
|
||||
|
||||
import pytest
|
||||
import sys
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
from main import app, PluginUsage, PluginPerformance, PluginRating, PluginEvent
|
||||
|
||||
|
||||
@pytest.mark.unit
def test_app_initialization():
    """The FastAPI app exists with the expected title and version."""
    assert app is not None
    assert app.title == "AITBC Plugin Analytics Service"
    assert app.version == "1.0.0"


@pytest.mark.unit
def test_plugin_usage_model():
    """PluginUsage stores all explicitly supplied fields."""
    record = PluginUsage(
        plugin_id="plugin_123",
        user_id="user_123",
        action="install",
        timestamp=datetime.utcnow(),
        metadata={"source": "marketplace"},
    )
    assert record.plugin_id == "plugin_123"
    assert record.user_id == "user_123"
    assert record.action == "install"
    assert record.metadata == {"source": "marketplace"}


@pytest.mark.unit
def test_plugin_usage_defaults():
    """PluginUsage defaults metadata to an empty dict."""
    record = PluginUsage(
        plugin_id="plugin_123",
        user_id="user_123",
        action="use",
        timestamp=datetime.utcnow(),
    )
    assert record.metadata == {}


@pytest.mark.unit
def test_plugin_performance_model():
    """PluginPerformance stores every metric it is given."""
    metrics = PluginPerformance(
        plugin_id="plugin_123",
        version="1.0.0",
        cpu_usage=50.5,
        memory_usage=30.2,
        response_time=0.123,
        error_rate=0.001,
        uptime=99.9,
        timestamp=datetime.utcnow(),
    )
    assert metrics.plugin_id == "plugin_123"
    assert metrics.version == "1.0.0"
    assert metrics.cpu_usage == 50.5
    assert metrics.memory_usage == 30.2
    assert metrics.response_time == 0.123
    assert metrics.error_rate == 0.001
    assert metrics.uptime == 99.9


@pytest.mark.unit
def test_plugin_rating_model():
    """PluginRating stores the rating and its review text."""
    review = PluginRating(
        plugin_id="plugin_123",
        user_id="user_123",
        rating=5,
        review="Great plugin!",
        timestamp=datetime.utcnow(),
    )
    assert review.plugin_id == "plugin_123"
    assert review.rating == 5
    assert review.review == "Great plugin!"


@pytest.mark.unit
def test_plugin_rating_defaults():
    """PluginRating defaults the review to None."""
    review = PluginRating(
        plugin_id="plugin_123",
        user_id="user_123",
        rating=4,
        timestamp=datetime.utcnow(),
    )
    assert review.review is None


@pytest.mark.unit
def test_plugin_event_model():
    """PluginEvent stores type, ids, and its data payload."""
    evt = PluginEvent(
        event_type="error",
        plugin_id="plugin_123",
        user_id="user_123",
        data={"error": "timeout"},
        timestamp=datetime.utcnow(),
    )
    assert evt.event_type == "error"
    assert evt.plugin_id == "plugin_123"
    assert evt.user_id == "user_123"
    assert evt.data == {"error": "timeout"}


@pytest.mark.unit
def test_plugin_event_defaults():
    """PluginEvent defaults user_id to None and data to an empty dict."""
    evt = PluginEvent(
        event_type="info",
        plugin_id="plugin_123",
        timestamp=datetime.utcnow(),
    )
    assert evt.user_id is None
    assert evt.data == {}
|
||||
Reference in New Issue
Block a user