skip the gpu memory checks if the device is set to 'auto' (#609)
* skip the gpu memory checks if the device is set to 'auto'

* skip gpu mem logging if cpu too

* don't worry about log_gpu_memory_usage since it calls another annotated fn

* rename decorator internal
winglian authored Sep 21, 2023
1 parent 92512c3 · commit 196ff11
Showing 1 changed file with 27 additions and 3 deletions.

src/axolotl/utils/bench.py
@@ -1,21 +1,48 @@
 """Benchmarking and measurement utilities"""
+import functools
+
 import pynvml
 import torch
 from pynvml.nvml import NVMLError
 
 
+def check_cuda_device(default_value):
+    """
+    wraps a function and returns the default value instead of running the
+    wrapped function if cuda isn't available or the device is auto
+    :param default_value:
+    :return:
+    """
+
+    def deco(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            device = kwargs.get("device", args[0] if args else None)
+
+            if not torch.cuda.is_available() or device == "auto" or device == "cpu":
+                return default_value
+
+            return func(*args, **kwargs)
+
+        return wrapper
+
+    return deco
+
+
+@check_cuda_device(0.0)
 def gpu_memory_usage(device=0):
     return torch.cuda.memory_allocated(device) / 1024.0**3
 
 
+@check_cuda_device((0.0, 0.0, 0.0))
 def gpu_memory_usage_all(device=0):
     usage = torch.cuda.memory_allocated(device) / 1024.0**3
     reserved = torch.cuda.memory_reserved(device) / 1024.0**3
     smi = gpu_memory_usage_smi(device)
     return usage, reserved - usage, max(0, smi - reserved)
 
 
+@check_cuda_device(0.0)
 def gpu_memory_usage_smi(device=0):
     if isinstance(device, torch.device):
         device = device.index
@@ -31,9 +58,6 @@ def gpu_memory_usage_smi(device=0):
 
 
 def log_gpu_memory_usage(log, msg, device):
-    if not torch.cuda.is_available() or device == "auto":
-        return (0, 0, 0)
-
     usage, cache, misc = gpu_memory_usage_all(device)
     extras = []
     if cache > 0:
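
For illustration only (not part of the commit): once the helpers are decorated, callers can pass any device value without guarding for CUDA themselves. A minimal sketch, assuming axolotl at or after this commit is importable:

    from axolotl.utils.bench import gpu_memory_usage, gpu_memory_usage_all

    # The decorator reads `device` from kwargs, falling back to the first
    # positional argument; "auto", "cpu", or an absent CUDA runtime all
    # short-circuit to the decorator's default value.
    print(gpu_memory_usage(device="auto"))  # -> 0.0
    print(gpu_memory_usage_all("cpu"))      # -> (0.0, 0.0, 0.0)

This is also why the explicit guard in log_gpu_memory_usage could be dropped: gpu_memory_usage_all now performs the same check via the decorator.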
