From 66741047427d018365cb6d62c4348a0538e919b7 Mon Sep 17 00:00:00 2001
From: Qing
Date: Sat, 20 May 2023 12:35:36 +0800
Subject: [PATCH] use torch_gc

---
 lama_cleaner/server.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/lama_cleaner/server.py b/lama_cleaner/server.py
index eaf5ab6..c186ce2 100644
--- a/lama_cleaner/server.py
+++ b/lama_cleaner/server.py
@@ -281,7 +281,6 @@ def process():
     try:
         res_np_img = model(image, mask, config)
     except RuntimeError as e:
-        torch.cuda.empty_cache()
         if "CUDA out of memory. " in str(e):
             # NOTE: the string may change?
             return "CUDA out of memory", 500
@@ -290,7 +289,7 @@ def process():
             return f"{str(e)}", 500
     finally:
         logger.info(f"process time: {(time.time() - start) * 1000}ms")
-        torch.cuda.empty_cache()
+        torch_gc()
 
     res_np_img = cv2.cvtColor(res_np_img.astype(np.uint8), cv2.COLOR_BGR2RGB)
     if alpha_channel is not None:
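
Note: the patch drops the bare torch.cuda.empty_cache() calls in favor of a torch_gc() helper; the call inside the except branch is safe to remove because the finally block runs on both the success and failure paths. The helper itself is defined elsewhere in the project and is not shown in this diff. A minimal sketch of what such a helper typically looks like, assuming a CUDA-or-CPU setup (the exact body in lama_cleaner may differ):

    import torch

    def torch_gc():
        # Return cached GPU memory to the driver only when CUDA is present,
        # so the call is a harmless no-op on CPU-only installs. This is the
        # main reason to prefer it over calling torch.cuda.empty_cache()
        # directly at every call site.
        if torch.cuda.is_available():
            torch.cuda.empty_cache()
            torch.cuda.ipc_collect()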