From 07c943670bd6bbca01e0f0c9c9e07d993c6fc928 Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Thu, 21 Sep 2023 09:15:46 -0400
Subject: [PATCH] don't worry about log_gpu_memory_usage since it calls
 another annotated fn

---
 src/axolotl/utils/bench.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/src/axolotl/utils/bench.py b/src/axolotl/utils/bench.py
index 82f4dfddf2..e3b445023c 100644
--- a/src/axolotl/utils/bench.py
+++ b/src/axolotl/utils/bench.py
@@ -58,9 +58,6 @@ def gpu_memory_usage_smi(device=0):
 
 
 def log_gpu_memory_usage(log, msg, device):
-    if not torch.cuda.is_available() or device == "auto":
-        return (0, 0, 0)
-
     usage, cache, misc = gpu_memory_usage_all(device)
     extras = []
     if cache > 0:
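
The early return deleted above is redundant because the helper it calls already performs the same availability check. Below is a minimal sketch of that pattern, assuming a guard decorator named check_cuda_device and simplified bodies for gpu_memory_usage_all and log_gpu_memory_usage; these names and bodies are illustrative assumptions, not code taken from this patch.

# Minimal sketch, not the actual axolotl implementation.
# Assumption: gpu_memory_usage_all is wrapped by a decorator that
# short-circuits when CUDA is unavailable or device == "auto", which is
# what makes the early return removed in the patch above unnecessary.
import functools

import torch


def check_cuda_device(default_value):
    """Return default_value instead of calling fn when no usable CUDA device is selected."""

    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            device = kwargs.get("device", args[0] if args else 0)
            if not torch.cuda.is_available() or device == "auto":
                return default_value
            return fn(*args, **kwargs)

        return wrapper

    return decorator


@check_cuda_device((0.0, 0.0, 0.0))
def gpu_memory_usage_all(device=0):
    # Simplified: report allocated memory, cached memory
    # (reserved minus allocated), and a placeholder for anything else, in GB.
    usage = torch.cuda.memory_allocated(device) / 1024**3
    reserved = torch.cuda.memory_reserved(device) / 1024**3
    return usage, reserved - usage, 0.0


def log_gpu_memory_usage(log, msg, device):
    # No guard needed here: the decorated helper already returns
    # (0.0, 0.0, 0.0) when there is nothing to measure.
    usage, cache, misc = gpu_memory_usage_all(device)
    extras = []
    if cache > 0:
        extras.append(f"+{cache:0.03f}GB cache")
    if misc > 0:
        extras.append(f"+{misc:0.03f}GB misc")
    log.info(f"GPU memory usage {msg}: {usage:0.03f}GB {' '.join(extras)}")
    return usage, cache, misc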