
Commit 6dc68a6

use temp_dir kwarg instead
winglian committed Nov 6, 2023
1 parent 7de6a56 commit 6dc68a6
Showing 6 changed files with 31 additions and 31 deletions.
6 changes: 3 additions & 3 deletions tests/e2e/test_fused_llama.py
@@ -27,7 +27,7 @@ class TestFusedLlama(unittest.TestCase):
     """

     @with_temp_dir
-    def test_fft_packing(self, output_dir):
+    def test_fft_packing(self, temp_dir):
         # pylint: disable=duplicate-code
         cfg = DictDefault(
             {
@@ -52,7 +52,7 @@ def test_fft_packing(self, output_dir):
                 "num_epochs": 2,
                 "micro_batch_size": 2,
                 "gradient_accumulation_steps": 1,
-                "output_dir": output_dir,
+                "output_dir": temp_dir,
                 "learning_rate": 0.00001,
                 "optimizer": "adamw_torch",
                 "lr_scheduler": "cosine",
@@ -70,4 +70,4 @@ def test_fft_packing(self, output_dir):
         dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args)

         train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta)
-        assert (Path(output_dir) / "pytorch_model.bin").exists()
+        assert (Path(temp_dir) / "pytorch_model.bin").exists()
18 changes: 9 additions & 9 deletions tests/e2e/test_lora_llama.py
@@ -25,7 +25,7 @@ class TestLoraLlama(unittest.TestCase):
     """

     @with_temp_dir
-    def test_lora(self, output_dir):
+    def test_lora(self, temp_dir):
         # pylint: disable=duplicate-code
         cfg = DictDefault(
             {
@@ -53,7 +53,7 @@ def test_lora(self, output_dir):
                 "num_epochs": 2,
                 "micro_batch_size": 8,
                 "gradient_accumulation_steps": 1,
-                "output_dir": output_dir,
+                "output_dir": temp_dir,
                 "learning_rate": 0.00001,
                 "optimizer": "adamw_torch",
                 "lr_scheduler": "cosine",
@@ -64,10 +64,10 @@ def test_lora(self, output_dir):
         dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args)

         train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta)
-        assert (Path(output_dir) / "adapter_model.bin").exists()
+        assert (Path(temp_dir) / "adapter_model.bin").exists()

     @with_temp_dir
-    def test_lora_packing(self, output_dir):
+    def test_lora_packing(self, temp_dir):
         # pylint: disable=duplicate-code
         cfg = DictDefault(
             {
@@ -97,7 +97,7 @@ def test_lora_packing(self, output_dir):
                 "num_epochs": 2,
                 "micro_batch_size": 8,
                 "gradient_accumulation_steps": 1,
-                "output_dir": output_dir,
+                "output_dir": temp_dir,
                 "learning_rate": 0.00001,
                 "optimizer": "adamw_torch",
                 "lr_scheduler": "cosine",
@@ -108,10 +108,10 @@ def test_lora_packing(self, output_dir):
         dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args)

         train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta)
-        assert (Path(output_dir) / "adapter_model.bin").exists()
+        assert (Path(temp_dir) / "adapter_model.bin").exists()

     @with_temp_dir
-    def test_lora_gptq(self, output_dir):
+    def test_lora_gptq(self, temp_dir):
         # pylint: disable=duplicate-code
         cfg = DictDefault(
             {
@@ -145,7 +145,7 @@ def test_lora_gptq(self, output_dir):
                 "save_steps": 0.5,
                 "micro_batch_size": 8,
                 "gradient_accumulation_steps": 1,
-                "output_dir": output_dir,
+                "output_dir": temp_dir,
                 "learning_rate": 0.00001,
                 "optimizer": "adamw_torch",
                 "lr_scheduler": "cosine",
@@ -156,4 +156,4 @@ def test_lora_gptq(self, output_dir):
         dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args)

         train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta)
-        assert (Path(output_dir) / "adapter_model.bin").exists()
+        assert (Path(temp_dir) / "adapter_model.bin").exists()
12 changes: 6 additions & 6 deletions tests/e2e/test_mistral.py
@@ -27,7 +27,7 @@ class TestMistral(unittest.TestCase):
     """

     @with_temp_dir
-    def test_lora(self, output_dir):
+    def test_lora(self, temp_dir):
         # pylint: disable=duplicate-code
         cfg = DictDefault(
             {
@@ -55,7 +55,7 @@ def test_lora(self, output_dir):
                 "num_epochs": 2,
                 "micro_batch_size": 2,
                 "gradient_accumulation_steps": 1,
-                "output_dir": output_dir,
+                "output_dir": temp_dir,
                 "learning_rate": 0.00001,
                 "optimizer": "adamw_torch",
                 "lr_scheduler": "cosine",
@@ -69,10 +69,10 @@ def test_lora(self, output_dir):
         dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args)

         train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta)
-        assert (Path(output_dir) / "adapter_model.bin").exists()
+        assert (Path(temp_dir) / "adapter_model.bin").exists()

     @with_temp_dir
-    def test_ft(self, output_dir):
+    def test_ft(self, temp_dir):
         # pylint: disable=duplicate-code
         cfg = DictDefault(
             {
@@ -94,7 +94,7 @@ def test_ft(self, output_dir):
                 "num_epochs": 2,
                 "micro_batch_size": 2,
                 "gradient_accumulation_steps": 1,
-                "output_dir": output_dir,
+                "output_dir": temp_dir,
                 "learning_rate": 0.00001,
                 "optimizer": "adamw_torch",
                 "lr_scheduler": "cosine",
@@ -112,4 +112,4 @@ def test_ft(self, output_dir):
         dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args)

         train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta)
-        assert (Path(output_dir) / "pytorch_model.bin").exists()
+        assert (Path(temp_dir) / "pytorch_model.bin").exists()
12 changes: 6 additions & 6 deletions tests/e2e/test_mistral_samplepack.py
@@ -27,7 +27,7 @@ class TestMistral(unittest.TestCase):
     """

     @with_temp_dir
-    def test_lora_packing(self, output_dir):
+    def test_lora_packing(self, temp_dir):
         # pylint: disable=duplicate-code
         cfg = DictDefault(
             {
@@ -56,7 +56,7 @@ def test_lora_packing(self, output_dir):
                 "num_epochs": 2,
                 "micro_batch_size": 2,
                 "gradient_accumulation_steps": 1,
-                "output_dir": output_dir,
+                "output_dir": temp_dir,
                 "learning_rate": 0.00001,
                 "optimizer": "adamw_torch",
                 "lr_scheduler": "cosine",
@@ -70,10 +70,10 @@ def test_lora_packing(self, output_dir):
         dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args)

         train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta)
-        assert (Path(output_dir) / "adapter_model.bin").exists()
+        assert (Path(temp_dir) / "adapter_model.bin").exists()

     @with_temp_dir
-    def test_ft_packing(self, output_dir):
+    def test_ft_packing(self, temp_dir):
         # pylint: disable=duplicate-code
         cfg = DictDefault(
             {
@@ -96,7 +96,7 @@ def test_ft_packing(self, output_dir):
                 "num_epochs": 2,
                 "micro_batch_size": 2,
                 "gradient_accumulation_steps": 1,
-                "output_dir": output_dir,
+                "output_dir": temp_dir,
                 "learning_rate": 0.00001,
                 "optimizer": "adamw_torch",
                 "lr_scheduler": "cosine",
@@ -114,4 +114,4 @@ def test_ft_packing(self, output_dir):
         dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args)

         train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta)
-        assert (Path(output_dir) / "pytorch_model.bin").exists()
+        assert (Path(temp_dir) / "pytorch_model.bin").exists()
12 changes: 6 additions & 6 deletions tests/e2e/test_phi.py
@@ -25,7 +25,7 @@ class TestPhi(unittest.TestCase):
     """

     @with_temp_dir
-    def test_ft(self, output_dir):
+    def test_ft(self, temp_dir):
         # pylint: disable=duplicate-code
         cfg = DictDefault(
             {
@@ -55,7 +55,7 @@ def test_ft(self, output_dir):
                 "num_epochs": 1,
                 "micro_batch_size": 1,
                 "gradient_accumulation_steps": 1,
-                "output_dir": output_dir,
+                "output_dir": temp_dir,
                 "learning_rate": 0.00001,
                 "optimizer": "adamw_bnb_8bit",
                 "lr_scheduler": "cosine",
@@ -67,10 +67,10 @@ def test_ft(self, output_dir):
         dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args)

         train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta)
-        assert (Path(output_dir) / "pytorch_model.bin").exists()
+        assert (Path(temp_dir) / "pytorch_model.bin").exists()

     @with_temp_dir
-    def test_ft_packed(self, output_dir):
+    def test_ft_packed(self, temp_dir):
         # pylint: disable=duplicate-code
         cfg = DictDefault(
             {
@@ -100,7 +100,7 @@ def test_ft_packed(self, output_dir):
                 "num_epochs": 1,
                 "micro_batch_size": 1,
                 "gradient_accumulation_steps": 1,
-                "output_dir": output_dir,
+                "output_dir": temp_dir,
                 "learning_rate": 0.00001,
                 "optimizer": "adamw_bnb_8bit",
                 "lr_scheduler": "cosine",
@@ -112,4 +112,4 @@ def test_ft_packed(self, output_dir):
         dataset_meta = load_datasets(cfg=cfg, cli_args=cli_args)

         train(cfg=cfg, cli_args=cli_args, dataset_meta=dataset_meta)
-        assert (Path(output_dir) / "pytorch_model.bin").exists()
+        assert (Path(temp_dir) / "pytorch_model.bin").exists()
2 changes: 1 addition & 1 deletion tests/e2e/utils.py
@@ -14,7 +14,7 @@ def wrapper(*args, **kwargs):
         temp_dir = tempfile.mkdtemp()
         try:
             # Pass the temporary directory to the test function
-            test_func(temp_dir, *args, **kwargs)
+            test_func(*args, temp_dir=temp_dir, **kwargs)
         finally:
             # Clean up the directory after the test
             shutil.rmtree(temp_dir)
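
For context, a minimal, self-contained sketch of the decorator this hunk sits in. Only the wrapper body is taken from the diff above; the module-level imports, the functools.wraps call, and the TestExample class at the end are illustrative assumptions, not code from the repository.

import shutil
import tempfile
import unittest
from functools import wraps
from pathlib import Path


def with_temp_dir(test_func):
    @wraps(test_func)
    def wrapper(*args, **kwargs):
        # Create a throwaway directory for this test run
        temp_dir = tempfile.mkdtemp()
        try:
            # Pass the temporary directory to the test function as a keyword
            # argument so the positional args (e.g. the TestCase instance)
            # keep their usual positions
            test_func(*args, temp_dir=temp_dir, **kwargs)
        finally:
            # Clean up the directory after the test
            shutil.rmtree(temp_dir)

    return wrapper


class TestExample(unittest.TestCase):
    """Hypothetical test shown only to illustrate the calling convention."""

    @with_temp_dir
    def test_writes_artifact(self, temp_dir):
        (Path(temp_dir) / "artifact.txt").write_text("ok")
        assert (Path(temp_dir) / "artifact.txt").exists()

One way to read the change: when unittest invokes a decorated test method, the TestCase instance arrives in *args as the only positional argument, so passing temp_dir positionally ahead of *args would shift that binding, whereas passing it as a keyword leaves the positional arguments untouched and binds by parameter name. That is also why the test signatures in this commit rename output_dir to temp_dir.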
