diff --git a/anomalib/models/cflow/config.yaml b/anomalib/models/cflow/config.yaml
index 4dce5832b5..1fef4efd8b 100644
--- a/anomalib/models/cflow/config.yaml
+++ b/anomalib/models/cflow/config.yaml
@@ -54,9 +54,9 @@ trainer:
   auto_select_gpus: false
   benchmark: false
   check_val_every_n_epoch: 1
-  checkpoint_callback: true
   default_root_dir: null
   deterministic: false
+  enable_checkpointing: true
   fast_dev_run: false
   gpus: 1
   gradient_clip_val: 0
diff --git a/anomalib/models/dfkde/config.yaml b/anomalib/models/dfkde/config.yaml
index c19c2d31a9..f24452d5bf 100644
--- a/anomalib/models/dfkde/config.yaml
+++ b/anomalib/models/dfkde/config.yaml
@@ -41,9 +41,9 @@ trainer:
   auto_select_gpus: false
   benchmark: false
   check_val_every_n_epoch: 1 # Don't validate before extracting features.
-  checkpoint_callback: true
   default_root_dir: null
   deterministic: false
+  enable_checkpointing: true
   fast_dev_run: false
   gpus: 1
   gradient_clip_val: 0
diff --git a/anomalib/models/dfkde/model.py b/anomalib/models/dfkde/model.py
index a0cd0c6ad8..4c59b71d2e 100644
--- a/anomalib/models/dfkde/model.py
+++ b/anomalib/models/dfkde/model.py
@@ -100,7 +100,6 @@ def __init__(self, hparams: Union[DictConfig, ListConfig]):
             hparams.model.backbone, hparams.model.max_training_points, threshold_steepness, threshold_offset
         )
 
-        self.automatic_optimization = False
         self.embeddings: List[Tensor] = []
 
     @staticmethod
diff --git a/anomalib/models/dfm/config.yaml b/anomalib/models/dfm/config.yaml
index 9cbcd1231f..69d7ce4214 100755
--- a/anomalib/models/dfm/config.yaml
+++ b/anomalib/models/dfm/config.yaml
@@ -40,9 +40,9 @@ trainer:
   auto_select_gpus: false
   benchmark: false
   check_val_every_n_epoch: 1 # Don't validate before extracting features.
-  checkpoint_callback: true
   default_root_dir: null
   deterministic: false
+  enable_checkpointing: true
   fast_dev_run: false
   gpus: 1
   gradient_clip_val: 0
diff --git a/anomalib/models/dfm/model.py b/anomalib/models/dfm/model.py
index 944c885677..28a3d22bb7 100644
--- a/anomalib/models/dfm/model.py
+++ b/anomalib/models/dfm/model.py
@@ -34,7 +34,6 @@ def __init__(self, hparams: Union[DictConfig, ListConfig]):
         self.model: DFMModel = DFMModel(
             backbone=hparams.model.backbone, n_comps=hparams.model.pca_level, score_type=hparams.model.score_type
         )
-        self.automatic_optimization = False
         self.embeddings: List[Tensor] = []
 
     @staticmethod
diff --git a/anomalib/models/ganomaly/config.yaml b/anomalib/models/ganomaly/config.yaml
index b18584a6ee..4e69fe37e3 100644
--- a/anomalib/models/ganomaly/config.yaml
+++ b/anomalib/models/ganomaly/config.yaml
@@ -61,9 +61,9 @@ trainer:
   auto_select_gpus: false
   benchmark: false
   check_val_every_n_epoch: 2
-  checkpoint_callback: true
   default_root_dir: null
   deterministic: false
+  enable_checkpointing: true
   fast_dev_run: false
   gpus: 1
   gradient_clip_val: 0
diff --git a/anomalib/models/padim/config.yaml b/anomalib/models/padim/config.yaml
index 0370326378..bba0aae3ea 100644
--- a/anomalib/models/padim/config.yaml
+++ b/anomalib/models/padim/config.yaml
@@ -54,9 +54,9 @@ trainer:
   auto_select_gpus: false
   benchmark: false
   check_val_every_n_epoch: 1 # Don't validate before extracting features.
-  checkpoint_callback: true
   default_root_dir: null
   deterministic: false
+  enable_checkpointing: true
   fast_dev_run: false
   gpus: 1
   gradient_clip_val: 0
diff --git a/anomalib/models/padim/model.py b/anomalib/models/padim/model.py
index ca44b09478..cf371bb1a0 100644
--- a/anomalib/models/padim/model.py
+++ b/anomalib/models/padim/model.py
@@ -294,7 +294,6 @@ def __init__(self, hparams: Union[DictConfig, ListConfig]):
         ).eval()
 
         self.stats: List[Tensor] = []
-        self.automatic_optimization = False
         self.embeddings: List[Tensor] = []
 
     @staticmethod
diff --git a/anomalib/models/patchcore/config.yaml b/anomalib/models/patchcore/config.yaml
index 30fbaf6118..ce565671b5 100644
--- a/anomalib/models/patchcore/config.yaml
+++ b/anomalib/models/patchcore/config.yaml
@@ -52,9 +52,9 @@ trainer:
   auto_select_gpus: false
   benchmark: false
   check_val_every_n_epoch: 1 # Don't validate before extracting features.
-  checkpoint_callback: true
   default_root_dir: null
   deterministic: false
+  enable_checkpointing: true
   fast_dev_run: false
   gpus: 1
   gradient_clip_val: 0
diff --git a/anomalib/models/patchcore/model.py b/anomalib/models/patchcore/model.py
index 5e45bb16f4..419f728926 100644
--- a/anomalib/models/patchcore/model.py
+++ b/anomalib/models/patchcore/model.py
@@ -275,7 +275,6 @@ def __init__(self, hparams) -> None:
             backbone=hparams.model.backbone,
             apply_tiling=hparams.dataset.tiling.apply,
         )
-        self.automatic_optimization = False
         self.embeddings: List[Tensor] = []
 
     def configure_optimizers(self) -> None:
diff --git a/anomalib/models/stfpm/config.yaml b/anomalib/models/stfpm/config.yaml
index 7ca654041a..a6c07c4cc8 100644
--- a/anomalib/models/stfpm/config.yaml
+++ b/anomalib/models/stfpm/config.yaml
@@ -61,9 +61,9 @@ trainer:
   auto_select_gpus: false
   benchmark: false
   check_val_every_n_epoch: 1
-  checkpoint_callback: true
   default_root_dir: null
   deterministic: false
+  enable_checkpointing: true
   fast_dev_run: false
   gpus: 1
   gradient_clip_val: 0
diff --git a/anomalib/utils/callbacks/visualizer_callback.py b/anomalib/utils/callbacks/visualizer_callback.py
index 7879306624..43004be4c6 100644
--- a/anomalib/utils/callbacks/visualizer_callback.py
+++ b/anomalib/utils/callbacks/visualizer_callback.py
@@ -28,6 +28,7 @@
 from anomalib.pre_processing.transforms import Denormalize
 from anomalib.utils import loggers
 from anomalib.utils.loggers import AnomalibWandbLogger
+from anomalib.utils.loggers.base import ImageLoggerBase
 
 
 class VisualizerCallback(Callback):
@@ -69,7 +70,7 @@ def _add_images(
         for log_to in module.hparams.project.log_images_to:
             if log_to in loggers.AVAILABLE_LOGGERS:
                 # check if logger object is same as the requested object
-                if log_to in logger_type and module.logger is not None:
+                if log_to in logger_type and module.logger is not None and isinstance(module.logger, ImageLoggerBase):
                     module.logger.add_image(
                         image=visualizer.figure,
                         name=filename.parent.name + "_" + filename.name,
diff --git a/anomalib/utils/loggers/wandb.py b/anomalib/utils/loggers/wandb.py
index b5694ed861..ef45766574 100644
--- a/anomalib/utils/loggers/wandb.py
+++ b/anomalib/utils/loggers/wandb.py
@@ -86,7 +86,7 @@ def __init__(
         anonymous: Optional[bool] = None,
         version: Optional[str] = None,
         project: Optional[str] = None,
-        log_model: Optional[bool] = False,
+        log_model: Union[str, bool] = False,
         experiment=None,
         prefix: Optional[str] = "",
         sync_step: Optional[bool] = None,
diff --git a/requirements/base.txt b/requirements/base.txt
index 63eed41f88..0296a08193 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -12,7 +12,7 @@ opencv-python>=4.5.3.56
 opencv-contrib-python==4.5.5.62
 pandas~=1.1.5
 pillow==9.0.0
-pytorch-lightning==1.5.9
+pytorch-lightning>=1.6.0
 torch==1.8.1
 torchvision==0.9.1
 torchtext==0.9.1
diff --git a/tests/helpers/model.py b/tests/helpers/model.py
index a1dab835df..d1c53bc10d 100644
--- a/tests/helpers/model.py
+++ b/tests/helpers/model.py
@@ -109,6 +109,7 @@ def setup_model_train(
     # Train the model.
     if fast_run:
         config.trainer.max_epochs = 1
+        config.trainer.check_val_every_n_epoch = 1
 
     trainer = Trainer(callbacks=callbacks, **config.trainer)
     trainer.fit(model=model, datamodule=datamodule)