Hotfix/sg 645 regression tests essential fixes (#669)
* release tag removed for check

* missing ckpt root in recipes fixed

* warn instead of error if max batches longer than loader

* __version__ update

* release tag filter added back
shaydeci authored Jan 30, 2023
1 parent 69a82bc commit e32da15
Showing 8 changed files with 45 additions and 45 deletions.
56 changes: 28 additions & 28 deletions .circleci/config.yml

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion src/super_gradients/__init__.py
@@ -22,6 +22,6 @@
     "setup_device",
 ]

-__version__ = "3.0.6"
+__version__ = "3.0.7"

 env_sanity_check()
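A quick runtime check of the bump (a trivial sketch; it assumes the build from this commit is the one installed):

import super_gradients

# __version__ is read straight from src/super_gradients/__init__.py, patched above.
assert super_gradients.__version__ == "3.0.7"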
@@ -20,7 +20,7 @@ arch_params:
 experiment_name: mobileNetv3_large_training

 architecture: mobilenet_v3_large
-
+ckpt_root_dir:

 # THE FOLLOWING PARAMS ARE DIRECTLY USED BY HYDRA
 hydra:
@@ -20,7 +20,7 @@ arch_params:
 experiment_name: mobileNetv3_small_training

 architecture: mobilenet_v3_small
-
+ckpt_root_dir:

 # THE FOLLOWING PARAMS ARE DIRECTLY USED BY HYDRA
 hydra:
2 changes: 1 addition & 1 deletion src/super_gradients/recipes/imagenet_repvgg.yaml
@@ -37,7 +37,7 @@ multi_gpu: DDP
 num_gpus: 4

 architecture: repvgg_a0
-
+ckpt_root_dir:

 # THE FOLLOWING PARAMS ARE DIRECTLY USED BY HYDRA
 hydra:
2 changes: 1 addition & 1 deletion src/super_gradients/recipes/imagenet_vit_base.yaml
@@ -33,7 +33,7 @@ experiment_name: vit_base_imagenet1k
 architecture: vit_base
 multi_gpu: DDP
 num_gpus: 8
-
+ckpt_root_dir:

 # THE FOLLOWING PARAMS ARE DIRECTLY USED BY HYDRA
 hydra:
2 changes: 1 addition & 1 deletion src/super_gradients/recipes/imagenet_vit_large.yaml
@@ -29,7 +29,7 @@ architecture: vit_large
 experiment_name: vit_large_imagenet1k
 multi_gpu: DDP
 num_gpus: 8
-
+ckpt_root_dir:

 # THE FOLLOWING PARAMS ARE DIRECTLY USED BY HYDRA
 hydra:
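The recipe diffs above all land the same one-line fix: an explicit, empty ckpt_root_dir: key. A hedged sketch of filling it in through Hydra composition, which the recipes' hydra: blocks suggest is how they are consumed — the config directory path is a placeholder, and this assumes the recipe's own includes resolve from that directory:

from hydra import compose, initialize_config_dir

# Placeholder path: point this at the installed super_gradients/recipes directory.
with initialize_config_dir(config_dir="/path/to/super_gradients/recipes", version_base=None):
    # Override the newly added key without editing the YAML.
    cfg = compose(config_name="imagenet_repvgg", overrides=["ckpt_root_dir=/data/checkpoints"])

print(cfg.ckpt_root_dir)  # /data/checkpoints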
22 changes: 11 additions & 11 deletions src/super_gradients/training/sg_trainer/sg_trainer.py
@@ -1192,19 +1192,19 @@ def forward(self, inputs, targets):

         self.ckpt_best_name = self.training_params.ckpt_best_name

-        if self.training_params.max_train_batches is not None and (
-            self.training_params.max_train_batches > len(self.train_loader) or self.training_params.max_train_batches <= 0
-        ):
-
-            raise ValueError("max_train_batches must be positive and smaller then len(train_loader).")
+        if self.training_params.max_train_batches is not None:
+            if self.training_params.max_train_batches > len(self.train_loader):
+                logger.warning("max_train_batches is greater than len(self.train_loader) and will have no effect.")
+            elif self.training_params.max_train_batches <= 0:
+                raise ValueError("max_train_batches must be positive.")
+
+        if self.training_params.max_valid_batches is not None:
+            if self.training_params.max_valid_batches > len(self.valid_loader):
+                logger.warning("max_valid_batches is greater than len(self.valid_loader) and will have no effect.")
+            elif self.training_params.max_valid_batches <= 0:
+                raise ValueError("max_valid_batches must be positive.")

         self.max_train_batches = self.training_params.max_train_batches

-        if self.training_params.max_valid_batches is not None and (
-            self.training_params.max_valid_batches > len(self.valid_loader) or self.training_params.max_valid_batches <= 0
-        ):
-
-            raise ValueError("max_valid_batches must be positive and smaller then len(valid_loader).")
         self.max_valid_batches = self.training_params.max_valid_batches

         # STATE ATTRIBUTE SET HERE FOR SUBSEQUENT TRAIN() CALLS
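The behavior change in this hunk, reproduced as a standalone sketch (check_max_batches is an illustrative helper, not the Trainer API): a limit larger than the loader now only warns, and a non-positive limit still raises, whereas before the fix both cases raised ValueError.

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)


def check_max_batches(max_batches, loader_len, name):
    # Mirrors the post-fix validation in Trainer: too-large limits warn,
    # non-positive limits raise; None means "no limit" and is skipped.
    if max_batches is not None:
        if max_batches > loader_len:
            logger.warning(f"{name} is greater than the loader length and will have no effect.")
        elif max_batches <= 0:
            raise ValueError(f"{name} must be positive.")


check_max_batches(10_000, loader_len=500, name="max_train_batches")  # warns, no exception
check_max_batches(None, loader_len=500, name="max_valid_batches")    # no-op
check_max_batches(0, loader_len=500, name="max_train_batches")       # raises ValueError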
