From e7e9c5597bf9be81cda8ea2ec2bfbe7629b9cff7 Mon Sep 17 00:00:00 2001
From: Scott Register
Date: Sat, 26 Jul 2025 13:21:39 -0700
Subject: [PATCH] old sam cleanup

---
 vr180_matting/sam2_wrapper.py | 31 ++++++++++++++++++++++++++++---
 1 file changed, 28 insertions(+), 3 deletions(-)

diff --git a/vr180_matting/sam2_wrapper.py b/vr180_matting/sam2_wrapper.py
index 8beba6b..ac8774b 100644
--- a/vr180_matting/sam2_wrapper.py
+++ b/vr180_matting/sam2_wrapper.py
@@ -256,12 +256,33 @@ class SAM2VideoMatting:
         """Clean up resources"""
         if self.inference_state is not None:
             try:
-                if hasattr(self.predictor, 'cleanup_state'):
+                # Reset SAM2 state first (critical for memory cleanup)
+                if hasattr(self.predictor, 'reset_state'):
+                    self.predictor.reset_state(self.inference_state)
+
+                # Fallback to cleanup_state if available
+                elif hasattr(self.predictor, 'cleanup_state'):
                     self.predictor.cleanup_state(self.inference_state)
+
+                # Explicitly delete inference state and video segments
+                del self.inference_state
+                if hasattr(self, 'video_segments') and self.video_segments:
+                    del self.video_segments
+                    self.video_segments = {}
+
             except Exception as e:
                 warnings.warn(f"Failed to cleanup SAM2 state: {e}")
-
-        self.inference_state = None
+            finally:
+                self.inference_state = None
+
+        # Explicitly delete predictor
+        if self.predictor is not None:
+            try:
+                del self.predictor
+            except Exception as e:
+                warnings.warn(f"Failed to delete predictor: {e}")
+            finally:
+                self.predictor = None
 
         # Clean up temporary video file
         if self.temp_video_path is not None:
@@ -276,6 +297,10 @@ class SAM2VideoMatting:
         # Clear CUDA cache
         if torch.cuda.is_available():
             torch.cuda.empty_cache()
+
+        # Force garbage collection (critical for memory leak prevention)
+        import gc
+        gc.collect()
 
     def __del__(self):
         """Destructor to ensure cleanup"""
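
Usage sketch (reviewer note, not part of the patch): the cleanup path above only helps if callers reliably reach it, so the intended call pattern is to drive the wrapper inside try/finally. SAM2VideoMatting and the module path are taken from the diff; the no-argument constructor, process_video(), and cleanup() names below are assumptions for illustration only.

    from vr180_matting.sam2_wrapper import SAM2VideoMatting

    matting = SAM2VideoMatting()  # assumed no-arg construction for brevity
    try:
        masks = matting.process_video("left_eye.mp4")  # hypothetical entry point
    finally:
        # Assumed name of the patched method: resets SAM2 state, drops the
        # predictor, removes the temp video, clears the CUDA cache, and runs
        # gc.collect(), so GPU/host memory is released even on failure.
        matting.cleanup()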