From 440c3ab527603ba699967b05064dd5d20bd13a4e Mon Sep 17 00:00:00 2001
From: NanoCode012
Date: Thu, 19 Oct 2023 11:13:20 +0900
Subject: [PATCH] Fix(model): Linear detected and added to target module with
 rope linear (#738)

* Fix(model): Linear detected and added to target module with rope linear

* fix: exclude layer instead
---
 src/axolotl/utils/models.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py
index c133e9eb6..bccb8b8e5 100644
--- a/src/axolotl/utils/models.py
+++ b/src/axolotl/utils/models.py
@@ -507,7 +507,11 @@ def find_all_linear_names(model):
     cls = (bnb.nn.Linear4bit, bnb.nn.Linear8bitLt, torch.nn.Linear, QuantLinear)
     lora_module_names = set()
     for name, module in model.named_modules():
-        if isinstance(module, cls) or "Linear" in module.__class__.__name__:
+        if (
+            isinstance(module, cls)
+            or "Linear" in module.__class__.__name__
+            and module.__class__.__name__ not in ("LlamaLinearScalingRotaryEmbedding",)
+        ):
             names = name.split(".")
             lora_module_names.add(names[0] if len(names) == 1 else names[-1])