Final repository
@@ -52,7 +52,8 @@ class ModelManager:
             "confidence_threshold": 0.3,
             "enable_ocr": True,
             "enable_tracking": True,
-            "model_path": None
+            "model_path": None,
+            "device": "GPU" # Force GPU usage for Intel Arc
         },
         "violations": {
             "red_light_grace_period": 2.0,
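Editor's note: this hunk hard-wires the inference device in the default detection config. As a rough illustration of where such a device string usually ends up (not part of this commit; the model path and variable names below are placeholders), an OpenVINO device name like "GPU", "CPU", or "AUTO" is passed to compile_model():

    # Sketch only: assumes OpenVINO is installed; the .xml path is a placeholder.
    from openvino.runtime import Core

    core = Core()
    print(core.available_devices)  # e.g. ['CPU', 'GPU'] on an Intel Arc machine
    model = core.read_model("openvino_models/yolo11x.xml")
    compiled = core.compile_model(model, device_name="GPU")  # or "CPU" / "AUTO"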
@@ -97,13 +98,27 @@ class ModelManager:

             # Initialize detector
             print(f"✅ Initializing OpenVINO detector with model: {model_path}")

+            # Store current model info for stats
+            self.current_model_path = model_path
+            self.current_model_name = self._extract_model_name_from_path(model_path)
+
             device = self.config["detection"].get("device", "AUTO")
-            print(f"✅ Using inference device: {device}")
+            print(f"🔧 Model Manager: Config device setting: {device}")
+            print(f"🔧 Model Manager: Creating detector with device: {device}")
             self.detector = OpenVINOVehicleDetector(
                 model_path=model_path,
                 device=device,
                 confidence_threshold=self.config["detection"]["confidence_threshold"]
             )
+            print(f"✅ Detector created with device: {device}")
+
+            # Verify the detector is using the correct device
+            if hasattr(self.detector, 'device'):
+                actual_device = self.detector.device
+                print(f"🔍 Model Manager: Detector reports device as: {actual_device}")
+            else:
+                print(f"🔍 Model Manager: Detector device attribute not available")

             # Use only RedLightViolationPipeline for violation/crosswalk/traffic light logic
             self.violation_pipeline = RedLightViolationPipeline(debug=True)
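Editor's note: the hasattr() check above only reports whatever the wrapper class stores in its own `device` attribute; it does not confirm which device OpenVINO actually selected. A hedged alternative, assuming a recent OpenVINO runtime and a `compiled` CompiledModel object (not shown in this diff), is to query the compiled model directly:

    # Sketch only: EXECUTION_DEVICES exists in recent OpenVINO releases
    # and may be unavailable on older versions.
    try:
        print("Actually executing on:", compiled.get_property("EXECUTION_DEVICES"))
    except Exception:
        print("EXECUTION_DEVICES property not supported by this OpenVINO version")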
@@ -128,18 +143,48 @@ class ModelManager:
             traceback.print_exc()

     def _find_best_model_path(self, base_model_name: str = None) -> Optional[str]:
         """
         Find the best model path based on configuration.
+        Now respects the model selection from config panel.
         """

         if base_model_name is None:
-            device = self.config["detection"].get("device", "AUTO")
-            if device == "CPU" or device == "AUTO":
-                # Use yolo11n for CPU - faster, lighter model
-                base_model_name = "yolo11n"
-                print(f"🔍 Device is {device}, selecting {base_model_name} model (optimized for CPU)")
+            # First, check if a specific model is selected in config
+            selected_model = self.config["detection"].get("model", None)
+            if selected_model and selected_model.lower() != "auto":
+                base_model_name = selected_model.lower()
+                # Convert YOLOv11x format to yolo11x format
+                if 'yolov11' in base_model_name:
+                    base_model_name = base_model_name.replace('yolov11', 'yolo11')
+                print(f"🎯 Using model selected from config panel: {base_model_name}")
             else:
-                # Use yolo11x for GPU - larger model with better accuracy
-                base_model_name = "yolo11x"
-                print(f"🔍 Device is {device}, selecting {base_model_name} model (optimized for GPU)")
+                # Fallback to device-based selection only if no specific model selected
+                device = self.config["detection"].get("device", "AUTO")
+                if device == "CPU" or device == "AUTO":
+                    # Use yolo11n for CPU - faster, lighter model
+                    base_model_name = "yolo11n"
+                    print(f"🔍 Device is {device}, selecting {base_model_name} model (optimized for CPU)")
+                else:
+                    # Use yolo11x for GPU - larger model with better accuracy
+                    base_model_name = "yolo11x"
+                    print(f"🔍 Device is {device}, selecting {base_model_name} model (optimized for GPU)")

+        # Ensure we have a clean model name (remove any version suffixes)
+        if base_model_name:
+            # Handle different model name formats
+            if "yolo11" in base_model_name.lower():
+                if "11n" in base_model_name.lower():
+                    base_model_name = "yolo11n"
+                elif "11x" in base_model_name.lower():
+                    base_model_name = "yolo11x"
+                elif "11s" in base_model_name.lower():
+                    base_model_name = "yolo11s"
+                elif "11m" in base_model_name.lower():
+                    base_model_name = "yolo11m"
+                elif "11l" in base_model_name.lower():
+                    base_model_name = "yolo11l"
+
         print(f"🔍 Looking for model: {base_model_name}")

         # Check if the openvino_models directory exists in the current working directory
         cwd_openvino_dir = Path.cwd() / "openvino_models"
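Editor's note: the elif ladder above normalizes assorted spellings ("YOLOv11x", "yolo11n", ...) to a canonical model name. A more compact equivalent of the same idea, shown purely as a sketch and not as code from this repository, is a single regex lookup:

    import re

    def normalize_yolo11_name(name: str) -> str:
        """Map strings like 'YOLOv11x' or 'yolo11n' to a canonical 'yolo11?' name (sketch)."""
        match = re.search(r"yolo\s*v?11([nslmx])", name.lower())
        return f"yolo11{match.group(1)}" if match else name

    assert normalize_yolo11_name("YOLOv11x") == "yolo11x"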
@@ -201,6 +246,55 @@ class ModelManager:

         print(f"❌ No model found for {base_model_name}")
         return None

+    def _extract_model_name_from_path(self, model_path: str) -> str:
+        """Extract model name from file path"""
+        try:
+            # Convert to lowercase for matching
+            path_lower = model_path.lower()
+            print(f"🔍 Extracting model name from path: {model_path}")
+            print(f"🔍 Path lower: {path_lower}")
+
+            # Check for specific models
+            if 'yolo11n' in path_lower:
+                extracted_name = 'YOLOv11n'
+                print(f"✅ Extracted model name: {extracted_name}")
+                return extracted_name
+            elif 'yolo11s' in path_lower:
+                extracted_name = 'YOLOv11s'
+                print(f"✅ Extracted model name: {extracted_name}")
+                return extracted_name
+            elif 'yolo11m' in path_lower:
+                extracted_name = 'YOLOv11m'
+                print(f"✅ Extracted model name: {extracted_name}")
+                return extracted_name
+            elif 'yolo11l' in path_lower:
+                extracted_name = 'YOLOv11l'
+                print(f"✅ Extracted model name: {extracted_name}")
+                return extracted_name
+            elif 'yolo11x' in path_lower:
+                extracted_name = 'YOLOv11x'
+                print(f"✅ Extracted model name: {extracted_name}")
+                return extracted_name
+            elif 'yolo11' in path_lower:
+                extracted_name = 'YOLOv11'
+                print(f"✅ Extracted model name: {extracted_name}")
+                return extracted_name
+            else:
+                extracted_name = 'YOLO'
+                print(f"⚠️ Fallback model name: {extracted_name}")
+                return extracted_name
+        except Exception as e:
+            print(f"⚠️ Error extracting model name: {e}")
+            return 'Unknown'
+
+    def get_current_model_info(self) -> dict:
+        """Get current model information for stats"""
+        return {
+            'model_path': getattr(self, 'current_model_path', None),
+            'model_name': getattr(self, 'current_model_name', 'Unknown'),
+            'device': self.detector.get_device() if self.detector else 'Unknown'
+        }
+
     def detect(self, frame: np.ndarray) -> List[Dict]:
         """
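Editor's note: get_current_model_info() is presumably what a stats or overlay panel polls. A minimal consumer sketch, assuming only the dict keys returned above (the `manager` variable is hypothetical, not from the repository):

    # Hypothetical caller; `manager` would be a ModelManager instance.
    info = manager.get_current_model_info()
    print(f"Model: {info['model_name']}  Device: {info['device']}  Path: {info['model_path']}")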
@@ -392,8 +486,9 @@ class ModelManager:
         if not new_config:
             return

-        # Store old device setting to check if it changed
+        # Store old settings to check if they changed
         old_device = self.config["detection"].get("device", "AUTO") if "detection" in self.config else "AUTO"
+        old_model = self.config["detection"].get("model", "auto") if "detection" in self.config else "auto"

         # Update configuration
         for section in new_config:
@@ -402,21 +497,46 @@ class ModelManager:
             else:
                 self.config[section] = new_config[section]

-        # Check if device changed - if so, we need to reinitialize models
+        # Check if device or model changed - if so, we need to reinitialize models
         new_device = self.config["detection"].get("device", "AUTO")
+        new_model = self.config["detection"].get("model", "auto")
         device_changed = old_device != new_device
+        model_changed = old_model != new_model

-        if device_changed:
-            print(f"📢 Device changed from {old_device} to {new_device}, reinitializing models...")
-            # Reinitialize models with new device
-            self._initialize_models()
+        if device_changed or model_changed:
+            print(f"📢 Configuration changed:")
+            if device_changed:
+                print(f"   Device: {old_device} → {new_device}")
+            if model_changed:
+                print(f"   Model: {old_model} → {new_model}")
+            print(f"   Reinitializing models...")
+
+            # Force complete reinitialization - let the model path extraction handle the naming
+            self.force_model_reload()
+            return

-        # Just update detector confidence threshold if device didn't change
+        # Just update detector confidence threshold if device and model didn't change
         if self.detector:
             conf_thres = self.config["detection"].get("confidence_threshold", 0.5)
             self.detector.conf_thres = conf_thres

+    def force_model_reload(self):
+        """Force complete model reload with current config"""
+        print("🔄 Force reloading models with current configuration...")
+
+        # Get the configured model selection
+        selected_model = self.config["detection"].get("model", "auto")
+        print(f"🎯 Force reload: Config model selection = {selected_model}")
+
+        # Clear current models
+        self.detector = None
+        self.violation_pipeline = None
+
+        # Reinitialize with current config - let _initialize_models handle the naming
+        self._initialize_models()
+
+        print("✅ Models reloaded successfully")
+
     def _bbox_iou(self, boxA, boxB):
         # Compute the intersection over union of two boxes
         xA = max(boxA[0], boxB[0])
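Editor's note: the diff is cut off after the first line of _bbox_iou. For reference, a standard intersection-over-union computation for [x1, y1, x2, y2] boxes looks like the sketch below; this is the textbook formula, not necessarily the exact body in this repository.

    def bbox_iou(boxA, boxB):
        # Corners of the intersection rectangle
        xA = max(boxA[0], boxB[0])
        yA = max(boxA[1], boxB[1])
        xB = min(boxA[2], boxB[2])
        yB = min(boxA[3], boxB[3])
        # Intersection area (zero if the boxes do not overlap)
        inter = max(0, xB - xA) * max(0, yB - yA)
        areaA = (boxA[2] - boxA[0]) * (boxA[3] - boxA[1])
        areaB = (boxB[2] - boxB[0]) * (boxB[3] - boxB[1])
        union = areaA + areaB - inter
        return inter / union if union > 0 else 0.0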