Commit

cleanup previous implementation

winglian committed Oct 28, 2023
1 parent 8d609a8 commit 71a6f1f
Showing 3 changed files with 0 additions and 100 deletions.
40 changes: 0 additions & 40 deletions src/axolotl/monkeypatch/llama_embeddings_hijack.py

This file was deleted.

40 changes: 0 additions & 40 deletions src/axolotl/monkeypatch/mistral_embeddings_hijack.py

This file was deleted.

20 changes: 0 additions & 20 deletions src/axolotl/utils/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -180,26 +180,6 @@ def load_model(
         LOG.info("patching with flash attention")
         replace_mistral_attn_with_flash_attn(packed=cfg.sample_packing)
 
-    # if cfg.is_llama_derived_model and cfg.noisy_embedding_alpha:
-    #     from axolotl.monkeypatch.llama_embeddings_hijack import (
-    #         replace_llama_embeddings_with_uniform_distribution,
-    #     )
-    #
-    #     LOG.info("patching with noisy embeddings")
-    #     replace_llama_embeddings_with_uniform_distribution(
-    #         noise_alpha=cfg.noisy_embedding_alpha
-    #     )
-    #
-    # if cfg.is_mistral_derived_model and cfg.noisy_embedding_alpha:
-    #     from axolotl.monkeypatch.mistral_embeddings_hijack import (
-    #         replace_mistral_embeddings_with_uniform_distribution,
-    #     )
-    #
-    #     LOG.info("patching with noisy embeddings")
-    #     replace_mistral_embeddings_with_uniform_distribution(
-    #         noise_alpha=cfg.noisy_embedding_alpha
-    #     )
-    #
     if cfg.is_llama_derived_model and cfg.xpos_rope:
         from axolotl.monkeypatch.xpos_rope_llama_monkey_patch import (
             replace_llama_rope_with_xpos_rope,
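For context, the two deleted hijack modules and the commented-out call sites above implemented noisy-embedding training (NEFTune-style): in training mode, uniform noise scaled by noise_alpha / sqrt(seq_len * hidden_dim) is added to the output of the input-embedding layer. Below is a minimal sketch of such a patch, assuming a Hugging Face-style model exposing get_input_embeddings(); it is an assumed reconstruction, not the deleted code, and patch_embeddings_with_uniform_noise and its default noise_alpha are hypothetical names.

    # Minimal sketch (assumed reconstruction, not the removed code) of a
    # NEFTune-style embedding patch; patch_embeddings_with_uniform_noise
    # and its default noise_alpha are hypothetical.
    import math

    import torch


    def patch_embeddings_with_uniform_noise(model, noise_alpha=5.0):
        """Wrap the input-embedding forward so that, in training mode, each
        lookup gets uniform noise scaled by noise_alpha / sqrt(seq_len * dim)."""
        embed = model.get_input_embeddings()
        orig_forward = embed.forward

        def noisy_forward(input_ids):
            output = orig_forward(input_ids)
            if embed.training:
                # NEFTune scaling: alpha / sqrt(sequence length * embedding dim)
                scale = noise_alpha / math.sqrt(input_ids.size(1) * output.size(-1))
                output = output + torch.zeros_like(output).uniform_(-scale, scale)
            return output

        embed.forward = noisy_forward

A per-architecture wrapper in this spirit is presumably what the removed replace_llama_embeddings_with_uniform_distribution and replace_mistral_embeddings_with_uniform_distribution provided, gated behind cfg.is_llama_derived_model / cfg.is_mistral_derived_model and cfg.noisy_embedding_alpha.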
