Move initialization log message to base class #363

Merged: 4 commits, Jun 13, 2022
4 changes: 0 additions & 4 deletions anomalib/models/cflow/lightning_model.py
@@ -17,7 +17,6 @@
# See the License for the specific language governing permissions
# and limitations under the License.

-import logging
from typing import List, Tuple, Union

import einops
@@ -32,8 +31,6 @@
from anomalib.models.cflow.utils import get_logp, positional_encoding_2d
from anomalib.models.components import AnomalyModule

-logger = logging.getLogger(__name__)
-
__all__ = ["Cflow", "CflowLightning"]

@@ -54,7 +51,6 @@ def __init__(
        permute_soft: bool = False,
    ):
        super().__init__()
-        logger.info("Initializing Cflow Lightning model.")

        self.model: CflowModel = CflowModel(
            input_size=input_size,
5 changes: 5 additions & 0 deletions anomalib/models/components/base/anomaly_module.py
@@ -14,6 +14,7 @@
# See the License for the specific language governing permissions
# and limitations under the License.

+import logging
from abc import ABC
from typing import Any, List, Optional

@@ -28,6 +29,8 @@
    MinMax,
)

+logger = logging.getLogger(__name__)
+

class AnomalyModule(pl.LightningModule, ABC):
    """AnomalyModule to train, validate, predict and test images.
@@ -37,6 +40,8 @@ class AnomalyModule(pl.LightningModule, ABC):

    def __init__(self):
        super().__init__()
+        logger.info("Initializing %s model.", self.__class__.__name__)
+
        self.save_hyperparameters()
        self.model: nn.Module
        self.loss: Tensor
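The anomaly_module.py hunk above is the heart of the change: every model's __init__ already calls super().__init__(), so a single logger.info in the base class, keyed on self.__class__.__name__, replaces the per-model messages deleted from the files below. A minimal, self-contained sketch of the pattern follows (plain classes instead of pl.LightningModule, illustrative names):

import logging

logger = logging.getLogger(__name__)


class AnomalyModuleSketch:
    """Stand-in for the real AnomalyModule base class."""

    def __init__(self) -> None:
        # self.__class__ resolves to the concrete subclass, so the message
        # names whichever model is being constructed.
        logger.info("Initializing %s model.", self.__class__.__name__)


class Padim(AnomalyModuleSketch):
    """Stand-in subclass; no per-model log line is needed anymore."""

    def __init__(self) -> None:
        super().__init__()  # logs "Initializing Padim model."


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    Padim()
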
2 changes: 0 additions & 2 deletions anomalib/models/dfkde/lightning_model.py
@@ -53,9 +53,7 @@ def __init__(
        threshold_steepness: float = 0.05,
        threshold_offset: int = 12,
    ):
-
        super().__init__()
-        logger.info("Initializing DFKDE Lightning model.")

        self.model = DfkdeModel(
            backbone=backbone,
1 change: 0 additions & 1 deletion anomalib/models/dfm/lightning_model.py
@@ -53,7 +53,6 @@ def __init__(
        score_type: str = "fre",
    ):
        super().__init__()
-        logger.info("Initializing DFKDE Lightning model.")

        self.model: DFMModel = DFMModel(
            backbone=backbone,
3 changes: 0 additions & 3 deletions anomalib/models/draem/lightning_model.py
@@ -6,7 +6,6 @@
# Copyright (C) 2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

-import logging
from typing import Optional, Union

import torch
@@ -19,8 +18,6 @@
from anomalib.models.draem.torch_model import DraemModel
from anomalib.models.draem.utils import Augmenter

-logger = logging.getLogger(__name__)
-
__all__ = ["Draem", "DraemLightning"]

4 changes: 0 additions & 4 deletions anomalib/models/fastflow/lightning_model.py
@@ -4,7 +4,6 @@
# SPDX-License-Identifier: Apache-2.0
#

-import logging
from typing import Tuple, Union

import torch
@@ -16,8 +15,6 @@
from anomalib.models.components import AnomalyModule
from anomalib.models.fastflow.torch_model import FastflowLoss, FastflowModel

-logger = logging.getLogger(__name__)
-

@MODEL_REGISTRY
class Fastflow(AnomalyModule):
@@ -40,7 +37,6 @@ def __init__(
        hidden_ratio: float = 1.0,
    ):
        super().__init__()
-        logger.info("Initializing Fastflow Lightning model.")

        self.model = FastflowModel(
            input_size=input_size,
2 changes: 0 additions & 2 deletions anomalib/models/ganomaly/lightning_model.py
@@ -62,9 +62,7 @@ def __init__(
        wcon: int = 50,
        wenc: int = 1,
    ):
-
        super().__init__()
-        logger.info("Initializing Ganomaly Lightning model.")

        self.model: GanomalyModel = GanomalyModel(
            input_size=input_size,
1 change: 0 additions & 1 deletion anomalib/models/padim/lightning_model.py
@@ -50,7 +50,6 @@ def __init__(
        backbone: str,
    ):
        super().__init__()
-        logger.info("Initializing Padim Lightning model.")

        self.layers = layers
        self.model: PadimModel = PadimModel(
2 changes: 0 additions & 2 deletions anomalib/models/patchcore/lightning_model.py
@@ -52,9 +52,7 @@ def __init__(
        coreset_sampling_ratio: float = 0.1,
        num_neighbors: int = 9,
    ) -> None:
-
        super().__init__()
-        logger.info("Initializing Patchcore Lightning model.")

        self.model: PatchcoreModel = PatchcoreModel(
            input_size=input_size,
4 changes: 0 additions & 4 deletions anomalib/models/reverse_distillation/lightning_model.py
@@ -17,7 +17,6 @@
# See the License for the specific language governing permissions
# and limitations under the License.

-import logging
from typing import Dict, List, Tuple, Union

from omegaconf import DictConfig, ListConfig
@@ -30,8 +29,6 @@
from .loss import ReverseDistillationLoss
from .torch_model import ReverseDistillationModel

-logger = logging.getLogger(__name__)
-

@MODEL_REGISTRY
class ReverseDistillation(AnomalyModule):
@@ -45,7 +42,6 @@ class ReverseDistillation(AnomalyModule):

    def __init__(self, input_size: Tuple[int, int], backbone: str, layers: List[str], anomaly_map_mode: str):
        super().__init__()
-        logger.info("Initializing Reverse Distillation Lightning model.")
        self.model = ReverseDistillationModel(
            backbone=backbone, layers=layers, input_size=input_size, anomaly_map_mode=anomaly_map_mode
        )
5 changes: 0 additions & 5 deletions anomalib/models/stfpm/lightning_model.py
@@ -17,7 +17,6 @@
# See the License for the specific language governing permissions
# and limitations under the License.

-import logging
from typing import List, Tuple, Union

import torch
@@ -29,8 +28,6 @@
from anomalib.models.components import AnomalyModule
from anomalib.models.stfpm.torch_model import STFPMModel

-logger = logging.getLogger(__name__)
-
__all__ = ["StfpmLightning"]

@@ -50,9 +47,7 @@ def __init__(
        backbone: str,
        layers: List[str],
    ):
-
        super().__init__()
-        logger.info("Initializing Stfpm Lightning model.")

        self.model = STFPMModel(
            input_size=input_size,