
fix: validate that gpu_memory_limit and max_memory are not both set
kallewoof committed Dec 29, 2023
1 parent cd34680 commit 4425a6f
Showing 2 changed files with 6 additions and 1 deletion.
5 changes: 5 additions & 0 deletions src/axolotl/utils/config.py
@@ -462,6 +462,11 @@ def validate_config(cfg):
"lora_modules_to_save not properly set yet adding new tokens. Please add `embed_tokens` and `lm_head` to `lora_modules_to_save`."
)

if cfg.max_memory is not None and cfg.gpu_memory_limit is not None:
raise ValueError(
"max_memory and gpu_memory_limit are mutually exclusive and cannot be used together."
)

# TODO
# MPT 7b
# https://github.com/facebookresearch/bitsandbytes/issues/25
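The new check makes the two memory-capping options fail fast at config-validation time instead of letting one silently win later. A minimal illustration of the behavior, assuming axolotl's DictDefault config wrapper from axolotl.utils.dict (not part of this diff; other validation checks elided):

    from axolotl.utils.config import validate_config
    from axolotl.utils.dict import DictDefault

    # Both options set at once: validation now raises immediately.
    cfg = DictDefault({"gpu_memory_limit": 20, "max_memory": {0: "20GiB"}})
    try:
        validate_config(cfg)
    except ValueError as err:
        print(err)  # max_memory and gpu_memory_limit are mutually exclusive ...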
2 changes: 1 addition & 1 deletion src/axolotl/utils/models.py
@@ -302,7 +302,7 @@ def load_model(
     max_memory = cfg.max_memory
     device_map = cfg.device_map
 
-    if cfg.gpu_memory_limit and max_memory is None:
+    if cfg.gpu_memory_limit:
         gpu_memory_limit = (
             str(cfg.gpu_memory_limit) + "GiB"
             if isinstance(cfg.gpu_memory_limit, int)
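With the mutual-exclusion check now enforced in validate_config, the extra `max_memory is None` guard is redundant, so the condition is simplified. The hunk is truncated here, but the visible lines normalize an integer limit into a "GiB" string; a sketch of how such a per-GPU limit is typically fanned out into the {device: "NGiB"} max_memory mapping that transformers' from_pretrained accepts (the helper below is hypothetical, not code from this commit):

    import torch

    def build_max_memory(gpu_memory_limit):
        # Accept an int (interpreted as GiB) or an already formatted string.
        limit = (
            str(gpu_memory_limit) + "GiB"
            if isinstance(gpu_memory_limit, int)
            else gpu_memory_limit
        )
        # Cap every visible CUDA device at the same limit.
        return {i: limit for i in range(torch.cuda.device_count())}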
