Commit: also wrap log_gpu_memory_usage
winglian committed Sep 21, 2023
1 parent b1f1f44 commit 8efef9a
Showing 1 changed file with 1 addition and 3 deletions.
src/axolotl/utils/bench.py: 4 changes (1 addition & 3 deletions)
@@ -57,10 +57,8 @@ def gpu_memory_usage_smi(device=0):
     return 0.0
 
 
+@check_cuda_device((0, 0, 0))
 def log_gpu_memory_usage(log, msg, device):
-    if not torch.cuda.is_available() or device == "auto":
-        return (0, 0, 0)
-
     usage, cache, misc = gpu_memory_usage_all(device)
     extras = []
     if cache > 0:
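For context, the change replaces the inline CUDA-availability check with a check_cuda_device decorator defined elsewhere in bench.py. The sketch below shows one way such a decorator could work, assuming it takes the default return value as its argument and short-circuits when CUDA is unavailable or the device is "auto"; the names, signature, and argument handling are inferred from this diff and may differ from the actual implementation.

```python
import functools

import torch


def check_cuda_device(default_value):
    """Sketch of a decorator that returns ``default_value`` instead of
    calling the wrapped function when CUDA is unavailable or the
    requested device is "auto".

    Inferred from the diff above; the real decorator in
    src/axolotl/utils/bench.py may be implemented differently.
    """

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # In log_gpu_memory_usage(log, msg, device) the device is the
            # last positional argument; fall back to kwargs if it was
            # passed by keyword.
            device = kwargs.get("device", args[-1] if args else None)
            if not torch.cuda.is_available() or device == "auto":
                return default_value
            return func(*args, **kwargs)

        return wrapper

    return decorator
```

With a decorator like this, the manual `if not torch.cuda.is_available() or device == "auto"` guard inside log_gpu_memory_usage becomes redundant, which is why those three lines are removed.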
