
Commit 024b5cb
Fix for check with cfg and merge_lora
winglian committed Sep 18, 2023
1 parent 6b9b229 commit 024b5cb
Showing 2 changed files with 2 additions and 2 deletions.

src/axolotl/cli/__init__.py (2 changes: 1 addition & 1 deletion)
@@ -70,7 +70,7 @@ def do_merge_lora(
     model.to(dtype=torch.float16)
 
     if cfg.local_rank == 0:
-        LOG.info("saving merged model")
+        LOG.info(f"saving merged model to: {str(Path(cfg.output_dir) / 'merged')}")
         model.save_pretrained(
             str(Path(cfg.output_dir) / "merged"),
             safe_serialization=safe_serialization,
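
For reference, the new f-string resolves the same path that save_pretrained writes to. A minimal sketch of the resulting log line, using a hypothetical output_dir of ./completed-model (not a value from the commit):

from pathlib import Path

output_dir = "./completed-model"  # hypothetical stand-in for cfg.output_dir
print(f"saving merged model to: {str(Path(output_dir) / 'merged')}")
# prints: saving merged model to: completed-model/merged  (pathlib drops the leading './')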

src/axolotl/cli/merge_lora.py (2 changes: 1 addition & 1 deletion)
@@ -13,12 +13,12 @@
 def do_cli(config: Path = Path("examples/"), **kwargs):
     # pylint: disable=duplicate-code
     print_axolotl_text_art()
-    parsed_cfg = load_cfg(config, **kwargs)
     parser = transformers.HfArgumentParser((TrainerCliArgs))
     parsed_cli_args, _ = parser.parse_args_into_dataclasses(
         return_remaining_strings=True
     )
     parsed_cli_args.merge_lora = True
+    parsed_cfg = load_cfg(config, merge_lora=True, **kwargs)
 
     do_merge_lora(cfg=parsed_cfg, cli_args=parsed_cli_args)
 
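
The second change is about ordering: load_cfg now receives merge_lora=True directly, so any configuration check inside load_cfg that depends on merge_lora actually sees the flag; previously the flag was only set on parsed_cli_args after the config had already been loaded and validated. Below is a minimal sketch of that idea, assuming load_cfg folds keyword overrides into the config before running its checks; load_cfg_sketch and incompatible_option are hypothetical names, not axolotl's actual implementation:

def load_cfg_sketch(file_cfg: dict, **kwargs) -> dict:
    """Fold keyword overrides into the file config, then run validation."""
    cfg = {**file_cfg, **kwargs}  # overrides such as merge_lora=True win
    # Hypothetical check that branches on merge_lora. Before this commit the
    # flag was only set on the CLI args *after* the config was loaded, so a
    # check like this would never have seen it.
    if cfg.get("merge_lora") and cfg.get("incompatible_option"):
        raise ValueError("incompatible_option cannot be combined with merge_lora")
    return cfg


cfg = load_cfg_sketch({"output_dir": "./completed-model"}, merge_lora=True)
assert cfg["merge_lora"] is True

Note that the diff keeps parsed_cli_args.merge_lora = True as well, so both the parsed config and the CLI args carry the flag into do_merge_lora.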
