Skip to content

Commit

Permalink
chore(callback): Remove old peft saving code
Browse files Browse the repository at this point in the history
  • Loading branch information
NanoCode012 committed Aug 30, 2023
1 parent c56b450 commit d1c9683
Show file tree
Hide file tree
Showing 2 changed files with 0 additions and 30 deletions.
23 changes: 0 additions & 23 deletions src/axolotl/utils/callbacks.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,29 +38,6 @@
IGNORE_INDEX = -100


class SavePeftModelCallback(TrainerCallback):  # pylint: disable=too-few-public-methods
    """Trainer callback that writes the PEFT adapter weights at each save event.

    On every checkpoint save, the adapter (not the full base model) is
    serialized under ``<output_dir>/<checkpoint>-<step>/adapter_model``.
    """

    def on_save(
        self,
        args: TrainingArguments,
        state: TrainerState,
        control: TrainerControl,
        **kwargs,
    ):
        # Directory for the checkpoint at the current global step.
        step_dir = f"{PREFIX_CHECKPOINT_DIR}-{state.global_step}"
        adapter_dir = os.path.join(args.output_dir, step_dir, "adapter_model")

        # Persist only the PEFT adapter, honoring the trainer's safetensors setting.
        model = kwargs["model"]
        model.save_pretrained(adapter_dir, save_safetensors=args.save_safetensors)

        return control


class SaveBetterTransformerModelCallback(
TrainerCallback
): # pylint: disable=too-few-public-methods
Expand Down
7 changes: 0 additions & 7 deletions src/axolotl/utils/trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@
from axolotl.utils.callbacks import (
GPUStatsCallback,
SaveBetterTransformerModelCallback,
SavePeftModelCallback,
bench_eval_callback_factory,
)
from axolotl.utils.collators import DataCollatorForSeq2Seq
Expand Down Expand Up @@ -637,12 +636,6 @@ def setup_trainer(cfg, train_dataset, eval_dataset, model, tokenizer, total_num_
)
callbacks.append(early_stop_cb)

if cfg.local_rank == 0 and cfg.adapter in [
"lora",
"qlora",
]: # only save in rank 0
callbacks.append(SavePeftModelCallback)

if hasattr(model, "use_bettertransformer") and model.use_bettertransformer is True:
callbacks.append(SaveBetterTransformerModelCallback)

Expand Down

0 comments on commit d1c9683

Please sign in to comment.