
remove lora fused packing test (axolotl-ai-cloud#758)
winglian committed Oct 22, 2023
1 parent 9a420e0 commit c6ea0a9
Showing 1 changed file with 0 additions and 44 deletions.

tests/e2e/test_fused_llama.py
@@ -25,50 +25,6 @@ class TestFusedLlama(unittest.TestCase):
     Test case for Llama models using Fused layers
     """
 
-    def test_lora_packing(self):
-        # pylint: disable=duplicate-code
-        output_dir = tempfile.mkdtemp()
-        cfg = DictDefault(
-            {
-                "base_model": "JackFram/llama-68m",
-                "base_model_config": "JackFram/llama-68m",
-                "flash_attention": True,
-                "flash_attn_fuse_qkv": True,
-                "flash_attn_fuse_mlp": True,
-                "sample_packing": True,
-                "sequence_len": 1024,
-                "load_in_8bit": True,
-                "val_set_size": 0.1,
-                "special_tokens": {
-                    "unk_token": "<unk>",
-                    "bos_token": "<s>",
-                    "eos_token": "</s>",
-                },
-                "datasets": [
-                    {
-                        "path": "mhenrichsen/alpaca_2k_test",
-                        "type": "alpaca",
-                    },
-                ],
-                "num_epochs": 2,
-                "micro_batch_size": 2,
-                "gradient_accumulation_steps": 1,
-                "output_dir": output_dir,
-                "learning_rate": 0.00001,
-                "optimizer": "adamw_torch",
-                "lr_scheduler": "cosine",
-                "max_steps": 20,
-                "save_steps": 10,
-                "eval_steps": 10,
-            }
-        )
-        normalize_config(cfg)
-        cli_args = TrainerCliArgs()
-        dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args)
-
-        train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta)
-        assert (Path(output_dir) / "pytorch_model.bin").exists()
-
     def test_fft_packing(self):
         # pylint: disable=duplicate-code
         output_dir = tempfile.mkdtemp()
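For reference, the deleted test can be replayed as a standalone script. The sketch below reconstructs it under the assumption that the import paths match those used by the repository's other e2e tests around this commit (axolotl.cli.load_datasets, axolotl.common.cli.TrainerCliArgs, axolotl.train.train, axolotl.utils.config.normalize_config, axolotl.utils.dict.DictDefault); treat it as illustrative, not authoritative.

# Standalone sketch of the removed e2e flow (reconstruction; import
# paths are assumed from the repo's other e2e tests of this era).
import tempfile
from pathlib import Path

from axolotl.cli import load_datasets
from axolotl.common.cli import TrainerCliArgs
from axolotl.train import train
from axolotl.utils.config import normalize_config
from axolotl.utils.dict import DictDefault

output_dir = tempfile.mkdtemp()
cfg = DictDefault(
    {
        # same keys as the removed test above: fused flash-attention
        # QKV/MLP layers plus sample packing on a tiny 68M-param Llama
        "base_model": "JackFram/llama-68m",
        "base_model_config": "JackFram/llama-68m",
        "flash_attention": True,
        "flash_attn_fuse_qkv": True,
        "flash_attn_fuse_mlp": True,
        "sample_packing": True,
        "sequence_len": 1024,
        "load_in_8bit": True,
        "val_set_size": 0.1,
        "special_tokens": {
            "unk_token": "<unk>",
            "bos_token": "<s>",
            "eos_token": "</s>",
        },
        "datasets": [
            {"path": "mhenrichsen/alpaca_2k_test", "type": "alpaca"},
        ],
        "num_epochs": 2,
        "micro_batch_size": 2,
        "gradient_accumulation_steps": 1,
        "output_dir": output_dir,
        "learning_rate": 0.00001,
        "optimizer": "adamw_torch",
        "lr_scheduler": "cosine",
        "max_steps": 20,
        "save_steps": 10,
        "eval_steps": 10,
    }
)
normalize_config(cfg)  # fill in derived config values
cli_args = TrainerCliArgs()
dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args)  # tokenize and pack

train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta)
assert (Path(output_dir) / "pytorch_model.bin").exists()

Note that although the removed test was named test_lora_packing, its config never set an adapter, so it appears to have exercised a full fine-tune rather than a LoRA run, overlapping with the surviving test_fft_packing; that overlap may be why it was removed.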
