cleanup and files added
This commit is contained in:
168
qt_app_pyside1/setup_vlm_model.py
Normal file
168
qt_app_pyside1/setup_vlm_model.py
Normal file
@@ -0,0 +1,168 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
OpenVINO LLaVA Model Setup Script
|
||||
Downloads and sets up the llava-v1.6-mistral-7b-hf-int8-ov model for traffic analysis
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
|
||||
def _pip_install(args, label):
    """Run ``python -m pip install <args>`` and report the outcome.

    Args:
        args: Extra arguments appended to the ``pip install`` command line.
        label: Human-readable package-set name used in status messages.

    Returns:
        bool: True when pip exited successfully, False otherwise.
    """
    try:
        # Use the current interpreter so the packages land in this environment.
        subprocess.check_call([sys.executable, "-m", "pip", "install"] + args)
        print(f"✅ {label} installed successfully")
        return True
    except subprocess.CalledProcessError as e:
        print(f"❌ Failed to install {label}: {e}")
        return False


def install_requirements():
    """Install required packages for OpenVINO GenAI.

    Installs the pre-release OpenVINO GenAI stack from the OpenVINO wheel
    index, then ``huggingface_hub`` (needed later to download the model).

    Returns:
        bool: True if every install succeeded, False on the first failure.
    """
    print("🚀 Installing OpenVINO GenAI requirements...")

    requirements = [
        "--pre",
        "-U",
        "--extra-index-url", "https://storage.openvinotoolkit.org/simple/wheels/pre-release",
        "openvino",
        "openvino-tokenizers",
        "openvino-genai",
    ]

    # Bail out on the first failure so the caller can stop the setup early.
    if not _pip_install(requirements, "OpenVINO GenAI"):
        return False
    if not _pip_install(["huggingface_hub"], "HuggingFace Hub"):
        return False
    return True
|
||||
|
||||
def download_model():
    """Download the OpenVINO LLaVA model snapshot next to this script.

    Returns:
        Path | None: The local model directory on success; None when
        huggingface_hub is unavailable or the download fails.
    """
    print("📥 Downloading OpenVINO LLaVA model...")

    try:
        import huggingface_hub as hf_hub

        model_id = "OpenVINO/llava-v1.6-mistral-7b-hf-int8-ov"

        # Store the snapshot beside this script so the app can find it.
        model_path = Path(__file__).parent / "llava_openvino_model"
        print(f"📁 Model will be saved to: {model_path}")

        hf_hub.snapshot_download(model_id, local_dir=str(model_path))

        print("✅ Model downloaded successfully")
        print(f"📁 Model location: {model_path}")

        # Show what actually landed on disk as a quick sanity check.
        entries = sorted(model_path.glob("*"))
        print(f"📄 Downloaded files ({len(entries)}):")
        for entry in entries:
            print(f"   - {entry.name}")

        return model_path

    except ImportError:
        print("❌ HuggingFace Hub not available. Please install it first.")
        return None
    except Exception as e:
        print(f"❌ Failed to download model: {e}")
        return None
|
||||
|
||||
def setup_model_directory():
    """Check for an existing local model directory beside this script.

    Returns:
        Path | None: The model directory if it already exists, else None.
    """
    expected = Path(__file__).parent / "llava_openvino_model"

    if not expected.exists():
        print(f"❌ Model directory not found: {expected}")
        return None

    print(f"✅ Model directory already exists: {expected}")
    return expected
|
||||
|
||||
def test_model():
    """Smoke-test the downloaded model with one tiny inference.

    Loads the pipeline on CPU, feeds it a solid-red 224x224 image, and asks
    a one-line question. Any failure (missing packages, missing model,
    inference error) is reported and mapped to False.

    Returns:
        bool: True when the test inference completed, False otherwise.
    """
    print("🧪 Testing the downloaded model...")

    try:
        import openvino_genai as ov_genai
        import numpy as np
        from PIL import Image

        model_dir = Path(__file__).parent / "llava_openvino_model"
        if not model_dir.exists():
            print("❌ Model not found for testing")
            return False

        # CPU is the safest default device for a compatibility check.
        target_device = "CPU"

        print(f"🔧 Loading model from: {model_dir}")
        pipeline = ov_genai.VLMPipeline(str(model_dir), target_device)
        print("✅ Model loaded successfully!")

        # Build a solid-red probe image and pack it as NHWC uint8.
        probe = Image.new('RGB', (224, 224), color='red')
        width, height = probe.size
        pixels = np.array(probe.getdata()).reshape(1, height, width, 3).astype(np.uint8)

        import openvino as ov
        probe_tensor = ov.Tensor(pixels)

        print("🧪 Running test inference...")
        pipeline.start_chat()
        output = pipeline.generate("What color is this image?", image=probe_tensor, max_new_tokens=10)
        pipeline.finish_chat()

        print(f"✅ Test successful! Model response: {output}")
        return True

    except Exception as e:
        # Broad on purpose: a failed smoke test should never crash setup.
        print(f"❌ Model test failed: {e}")
        return False
|
||||
|
||||
def main():
    """Run the full setup: install packages, download the model, smoke-test it.

    Returns:
        bool: True when install and download succeeded (a failed smoke test
        only produces a warning), False when an earlier step failed.
    """
    banner = "=" * 60
    print(banner)
    print("🤖 OpenVINO LLaVA Model Setup for Traffic Analysis")
    print(banner)

    # Step 1: Install requirements
    if not install_requirements():
        print("❌ Setup failed at requirements installation")
        return False

    print("\n" + banner)

    # Step 2: Download model
    downloaded = download_model()
    if not downloaded:
        print("❌ Setup failed at model download")
        return False

    print("\n" + banner)

    # Step 3: Test model — non-fatal, the runtime may still load it fine.
    if not test_model():
        print("⚠️ Model test failed, but setup might still work")

    print("\n" + banner)
    print("✅ OpenVINO LLaVA Model Setup Complete!")
    print(f"📁 Model location: {downloaded}")
    print("🚀 You can now use the new VLM controller")
    print(banner)

    return True
|
||||
|
||||
if __name__ == "__main__":
    # Exit non-zero on failure so shell scripts can detect a broken setup.
    if not main():
        sys.exit(1)
|
||||
Reference in New Issue
Block a user