category A round 2

2025-07-26 11:56:51 -07:00
parent b642b562f0
commit 463f881eaf
2 changed files with 152 additions and 5 deletions

@@ -9,6 +9,9 @@ from tqdm import tqdm
import warnings
import time
import subprocess
import gc
import psutil
import os
from .config import VR180Config
from .detector import YOLODetector
@@ -49,6 +52,96 @@ class VideoProcessor:
        self._initialize_models()

    def _get_process_memory_info(self) -> Dict[str, float]:
        """Get detailed memory usage for current process and children"""
        current_process = psutil.Process(os.getpid())

        # Get memory info for current process
        memory_info = current_process.memory_info()
        current_rss = memory_info.rss / 1024**3  # Convert to GB
        current_vms = memory_info.vms / 1024**3  # Virtual memory

        # Get memory info for all children
        children_rss = 0
        children_vms = 0
        child_count = 0
        try:
            for child in current_process.children(recursive=True):
                try:
                    child_memory = child.memory_info()
                    children_rss += child_memory.rss / 1024**3
                    children_vms += child_memory.vms / 1024**3
                    child_count += 1
                except (psutil.NoSuchProcess, psutil.AccessDenied):
                    pass
        except psutil.NoSuchProcess:
            pass

        # System memory info
        system_memory = psutil.virtual_memory()
        system_total = system_memory.total / 1024**3
        system_available = system_memory.available / 1024**3
        system_used = system_memory.used / 1024**3
        system_percent = system_memory.percent

        return {
            'process_rss_gb': current_rss,
            'process_vms_gb': current_vms,
            'children_rss_gb': children_rss,
            'children_vms_gb': children_vms,
            'total_process_gb': current_rss + children_rss,
            'child_count': child_count,
            'system_total_gb': system_total,
            'system_used_gb': system_used,
            'system_available_gb': system_available,
            'system_percent': system_percent
        }

    def _print_memory_step(self, step_name: str):
        """Print memory usage for a specific processing step"""
        memory_info = self._get_process_memory_info()

        print(f"\n📊 MEMORY: {step_name}")
        print(f" Process RSS: {memory_info['process_rss_gb']:.2f} GB")
        if memory_info['children_rss_gb'] > 0:
            print(f" Children RSS: {memory_info['children_rss_gb']:.2f} GB ({memory_info['child_count']} processes)")
            print(f" Total Process: {memory_info['total_process_gb']:.2f} GB")
        print(f" System: {memory_info['system_used_gb']:.1f}/{memory_info['system_total_gb']:.1f} GB ({memory_info['system_percent']:.1f}%)")
        print(f" Available: {memory_info['system_available_gb']:.1f} GB")

    def _aggressive_memory_cleanup(self, step_name: str = ""):
        """Perform aggressive memory cleanup and report before/after"""
        if step_name:
            print(f"\n🧹 CLEANUP: Before {step_name}")

        before_info = self._get_process_memory_info()
        before_rss = before_info['total_process_gb']

        # Multiple rounds of garbage collection
        for i in range(3):
            gc.collect()

        # Clear torch cache if available
        try:
            import torch
            if torch.cuda.is_available():
                torch.cuda.empty_cache()
                torch.cuda.synchronize()
        except ImportError:
            pass

        # Brief pause to allow cleanup
        time.sleep(0.1)

        after_info = self._get_process_memory_info()
        after_rss = after_info['total_process_gb']
        freed_memory = before_rss - after_rss

        if step_name:
            print(f" Before: {before_rss:.2f} GB → After: {after_rss:.2f} GB")
            print(f" Freed: {freed_memory:.2f} GB")

    def _initialize_models(self):
        """Initialize YOLO detector and SAM2 model"""
        print("Initializing models...")