235 lines
7.9 KiB
Python
235 lines
7.9 KiB
Python
"""
|
|
Smart Intersection Integration Validation Script
|
|
Test the integration of scene analytics into the desktop application
|
|
"""
|
|
|
|
import sys
|
|
import os
|
|
from pathlib import Path
|
|
|
|
# Add project root to path
|
|
project_root = Path(__file__).parent
|
|
sys.path.insert(0, str(project_root))
|
|
|
|
def test_imports():
|
|
"""Test that all required imports work"""
|
|
print("=== Testing Imports ===")
|
|
|
|
try:
|
|
from utils.scene_analytics import SceneAnalyticsAdapter, FPSCalculator, ObjectTracker, ROIAnalyzer
|
|
print("✅ Scene analytics utilities imported successfully")
|
|
except Exception as e:
|
|
print(f"❌ Scene analytics import failed: {e}")
|
|
return False
|
|
|
|
try:
|
|
from ui.smart_intersection_config import SmartIntersectionConfigPanel
|
|
print("✅ Smart intersection config panel imported successfully")
|
|
except Exception as e:
|
|
print(f"❌ Smart intersection config import failed: {e}")
|
|
return False
|
|
|
|
try:
|
|
from ui.user_guide_tab import UserGuideTab
|
|
print("⚠️ User guide tab imported (may have markdown dependency)")
|
|
except Exception as e:
|
|
print(f"⚠️ User guide tab import failed (expected - missing markdown): {e}")
|
|
|
|
return True
|
|
|
|
def test_configuration_files():
|
|
"""Test configuration files exist and are valid"""
|
|
print("\n=== Testing Configuration Files ===")
|
|
|
|
config_path = project_root / "config" / "smart-intersection"
|
|
|
|
try:
|
|
import json
|
|
|
|
# Test tracker config
|
|
tracker_config_file = config_path / "tracker-config.json"
|
|
if tracker_config_file.exists():
|
|
with open(tracker_config_file, 'r') as f:
|
|
tracker_config = json.load(f)
|
|
print("✅ Tracker configuration file loaded successfully")
|
|
print(f" - Max unreliable frames: {tracker_config.get('max_unreliable_frames', 'N/A')}")
|
|
print(f" - Baseline frame rate: {tracker_config.get('baseline_frame_rate', 'N/A')}")
|
|
else:
|
|
print(f"❌ Tracker config file not found: {tracker_config_file}")
|
|
return False
|
|
|
|
# Test desktop config
|
|
desktop_config_file = config_path / "desktop-config.json"
|
|
if desktop_config_file.exists():
|
|
with open(desktop_config_file, 'r') as f:
|
|
desktop_config = json.load(f)
|
|
print("✅ Desktop configuration file loaded successfully")
|
|
app_config = desktop_config.get('desktop_app_config', {})
|
|
print(f" - Multi-camera enabled: {app_config.get('scene_analytics', {}).get('enable_multi_camera', 'N/A')}")
|
|
print(f" - GPU device: {app_config.get('performance_settings', {}).get('gpu_device', 'N/A')}")
|
|
else:
|
|
print(f"❌ Desktop config file not found: {desktop_config_file}")
|
|
return False
|
|
|
|
except Exception as e:
|
|
print(f"❌ Configuration file test failed: {e}")
|
|
return False
|
|
|
|
return True
|
|
|
|
def test_documentation():
|
|
"""Test documentation files exist"""
|
|
print("\n=== Testing Documentation ===")
|
|
|
|
docs_path = project_root / "docs" / "user-guide"
|
|
|
|
if not docs_path.exists():
|
|
print(f"❌ Documentation directory not found: {docs_path}")
|
|
return False
|
|
|
|
doc_files = list(docs_path.glob("*.md"))
|
|
if not doc_files:
|
|
print("⚠️ No documentation files found")
|
|
return False
|
|
|
|
for doc_file in doc_files:
|
|
print(f"✅ Found documentation: {doc_file.name}")
|
|
|
|
return True
|
|
|
|
def test_openvino_device_detection():
|
|
"""Test OpenVINO device detection"""
|
|
print("\n=== Testing OpenVINO Device Detection ===")
|
|
|
|
try:
|
|
import openvino as ov
|
|
core = ov.Core()
|
|
available_devices = core.available_devices
|
|
|
|
print(f"✅ OpenVINO Core initialized successfully")
|
|
print(f"Available devices: {available_devices}")
|
|
|
|
# Check for Intel Arc GPU
|
|
gpu_devices = [d for d in available_devices if 'GPU' in d]
|
|
if gpu_devices:
|
|
print(f"✅ GPU devices found: {gpu_devices}")
|
|
|
|
# Get device info for first GPU
|
|
try:
|
|
gpu_device = gpu_devices[0]
|
|
device_name = core.get_property(gpu_device, "FULL_DEVICE_NAME")
|
|
print(f" GPU Name: {device_name}")
|
|
|
|
if 'arc' in device_name.lower() or 'intel' in device_name.lower():
|
|
print("✅ Intel Arc GPU detected!")
|
|
else:
|
|
print("⚠️ GPU detected but may not be Intel Arc")
|
|
|
|
except Exception as e:
|
|
print(f"⚠️ Could not get GPU device info: {e}")
|
|
else:
|
|
print("⚠️ No GPU devices found")
|
|
|
|
return len(available_devices) > 0
|
|
|
|
except Exception as e:
|
|
print(f"❌ OpenVINO device detection failed: {e}")
|
|
return False
|
|
|
|
def test_scene_analytics_functionality():
|
|
"""Test scene analytics adapter functionality"""
|
|
print("\n=== Testing Scene Analytics Functionality ===")
|
|
|
|
try:
|
|
from utils.scene_analytics import SceneAnalyticsAdapter
|
|
import numpy as np
|
|
|
|
# Create test adapter
|
|
adapter = SceneAnalyticsAdapter(camera_id="test_cam")
|
|
print("✅ Scene analytics adapter created")
|
|
|
|
# Create test frame and detections
|
|
test_frame = np.zeros((480, 640, 3), dtype=np.uint8)
|
|
test_detections = [
|
|
{
|
|
'bbox': [100, 100, 50, 100],
|
|
'confidence': 0.8,
|
|
'class_name': 'person',
|
|
'class_id': 0
|
|
},
|
|
{
|
|
'bbox': [200, 150, 80, 60],
|
|
'confidence': 0.9,
|
|
'class_name': 'car',
|
|
'class_id': 1
|
|
}
|
|
]
|
|
|
|
# Process test frame
|
|
analytics_result = adapter.process_frame(test_frame, test_detections)
|
|
print("✅ Scene analytics processing completed")
|
|
print(f" - FPS: {analytics_result.get('fps', 'N/A')}")
|
|
print(f" - Objects detected: {len(analytics_result.get('objects', []))}")
|
|
print(f" - Processing time: {analytics_result.get('processing_time_ms', 'N/A'):.2f}ms")
|
|
|
|
# Test performance stats
|
|
perf_stats = adapter.get_performance_stats()
|
|
print(f" - Performance stats: {len(perf_stats)} metrics")
|
|
|
|
return True
|
|
|
|
except Exception as e:
|
|
print(f"❌ Scene analytics functionality test failed: {e}")
|
|
import traceback
|
|
traceback.print_exc()
|
|
return False
|
|
|
|
def main():
|
|
"""Run all validation tests"""
|
|
print("Smart Intersection Integration Validation")
|
|
print("=" * 50)
|
|
|
|
tests = [
|
|
("Imports", test_imports),
|
|
("Configuration Files", test_configuration_files),
|
|
("Documentation", test_documentation),
|
|
("OpenVINO Device Detection", test_openvino_device_detection),
|
|
("Scene Analytics Functionality", test_scene_analytics_functionality)
|
|
]
|
|
|
|
results = {}
|
|
|
|
for test_name, test_func in tests:
|
|
try:
|
|
results[test_name] = test_func()
|
|
except Exception as e:
|
|
print(f"❌ Test '{test_name}' crashed: {e}")
|
|
results[test_name] = False
|
|
|
|
# Summary
|
|
print("\n" + "=" * 50)
|
|
print("VALIDATION SUMMARY")
|
|
print("=" * 50)
|
|
|
|
passed = 0
|
|
total = len(tests)
|
|
|
|
for test_name, result in results.items():
|
|
status = "✅ PASS" if result else "❌ FAIL"
|
|
print(f"{test_name:<30} {status}")
|
|
if result:
|
|
passed += 1
|
|
|
|
print(f"\nOverall: {passed}/{total} tests passed")
|
|
|
|
if passed == total:
|
|
print("🎉 All tests passed! Smart Intersection integration is ready.")
|
|
else:
|
|
print("⚠️ Some tests failed. Review the issues above.")
|
|
|
|
return passed == total
|
|
|
|
if __name__ == "__main__":
|
|
success = main()
|
|
sys.exit(0 if success else 1)
|