please god work
@@ -453,20 +453,17 @@ class VideoProcessor:
             chunk_data.close()
             del chunk_data, frames_array
 
-            # Delete chunk file to free disk space
-            try:
-                chunk_file.unlink()
-                print(f" 🗑️ Deleted {chunk_file.name}")
-            except Exception as e:
-                print(f" ⚠️ Could not delete {chunk_file.name}: {e}")
+            # Don't delete checkpoint files - they're needed for potential resume
+            # The checkpoint system manages cleanup separately
+            print(f" 📋 Keeping checkpoint file: {chunk_file.name}")
 
             # Aggressive cleanup and memory monitoring after each chunk
             self._aggressive_memory_cleanup(f"After streaming merge chunk {i}")
 
             # Memory safety check
             memory_info = self._get_process_memory_info()
-            if memory_info['rss_gb'] > 35: # Warning if approaching 46GB limit
-                print(f"⚠️ High memory usage: {memory_info['rss_gb']:.1f}GB - forcing cleanup")
+            if memory_info['total_process_gb'] > 35: # Warning if approaching 46GB limit
+                print(f"⚠️ High memory usage: {memory_info['total_process_gb']:.1f}GB - forcing cleanup")
                 gc.collect()
                 import torch
                 if torch.cuda.is_available():
@@ -882,6 +879,13 @@ class VideoProcessor:
         # Use streaming merge to avoid memory accumulation (fixes OOM)
         print("\n🎬 Using streaming merge (no memory accumulation)...")
 
+        # For resume scenarios, make sure we have all chunk files
+        if resume_info['can_resume']:
+            checkpoint_chunk_files = checkpoint_mgr.get_completed_chunk_files()
+            if len(checkpoint_chunk_files) != len(chunk_files):
+                print(f"⚠️ Using {len(checkpoint_chunk_files)} checkpoint files instead of {len(chunk_files)} temp files")
+                chunk_files = checkpoint_chunk_files
+
         # Determine audio source for final video
         audio_source = None
         if self.config.output.preserve_audio and Path(self.config.input.video_path).exists():
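Note on the memory check in the first hunk: it switches the watched key from the parent-only `rss_gb` to `total_process_gb`, so the 35GB warning (guarding the 46GB limit named in the comment) now covers the whole process tree. `_get_process_memory_info` itself is not part of this diff; a minimal sketch of what it could look like, assuming a psutil-based helper that reports both keys in gigabytes:

import psutil

def _get_process_memory_info(self):
    """Sketch only: the real helper is not shown in this commit."""
    gb = 1024 ** 3
    proc = psutil.Process()
    rss = proc.memory_info().rss
    child_rss = 0
    for child in proc.children(recursive=True):
        try:
            child_rss += child.memory_info().rss
        except psutil.NoSuchProcess:
            # Child exited between enumeration and measurement
            pass
    return {
        'rss_gb': rss / gb,                           # parent process only
        'total_process_gb': (rss + child_rss) / gb,   # parent plus children
    }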
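The per-chunk call to `self._aggressive_memory_cleanup(...)`, together with the `gc.collect()` and `torch.cuda.is_available()` context lines, suggests the cleanup combines Python garbage collection with a CUDA cache flush. That helper is also outside this diff; a rough sketch under that assumption (the label argument and log format are hypothetical):

import gc

def _aggressive_memory_cleanup(self, label: str) -> None:
    """Sketch only: free Python and GPU memory after each chunk."""
    gc.collect()
    try:
        import torch
        if torch.cuda.is_available():
            torch.cuda.empty_cache()  # release cached CUDA allocations
    except ImportError:
        pass
    mem = self._get_process_memory_info()
    print(f"  🧹 {label}: {mem['total_process_gb']:.1f}GB in use")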
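The resume path in the second hunk only works if `checkpoint_mgr.get_completed_chunk_files()` returns chunk paths in merge order. That method lives in the checkpoint manager, which is not part of this commit; a minimal sketch, assuming completed chunks are persisted as numbered files in a checkpoint directory (the `chunk_*.npz` naming is hypothetical):

from pathlib import Path

class CheckpointManager:
    """Sketch only: the real checkpoint manager is not shown here."""

    def __init__(self, checkpoint_dir):
        self.checkpoint_dir = Path(checkpoint_dir)

    def get_completed_chunk_files(self):
        # Sort numerically by chunk index so the streaming merge
        # stitches chunks back together in their original order.
        return sorted(
            self.checkpoint_dir.glob("chunk_*.npz"),
            key=lambda p: int(p.stem.split("_")[-1]),
        )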