Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

WIP Refactor test #197

Merged
merged 5 commits into from
Apr 5, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
42 changes: 42 additions & 0 deletions tests/helpers/config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
from pathlib import Path
from typing import List, Optional, Union

from omegaconf import DictConfig, ListConfig

from anomalib.config import get_configurable_parameters

from .dataset import get_dataset_path


def get_test_configurable_parameters(
    dataset_path: Optional[str] = None,
    model_name: Optional[str] = None,
    model_config_path: Optional[Union[Path, str]] = None,
    weight_file: Optional[str] = None,
    openvino: bool = False,
    config_filename: Optional[str] = "config",
    config_file_extension: Optional[str] = "yaml",
) -> Union[DictConfig, ListConfig]:
    """Get configurable parameters for testing.

    Wraps ``anomalib.config.get_configurable_parameters`` and overrides the
    dataset path so tests run against the dataset available in the test
    image/runner.

    Args:
        dataset_path: Optional[str]: Path to the dataset root. When ``None``,
            the shared test dataset path from ``get_dataset_path()`` is used.
        model_name: Optional[str]: Name of the model. (Default value = None)
        model_config_path: Optional[Union[Path, str]]: Path to the model
            configuration file. (Default value = None)
        weight_file: Optional[str]: Path to the weight file. (Default value = None)
        openvino: bool: Whether to use OpenVINO. (Default value = False)
        config_filename: Optional[str]: (Default value = "config")
        config_file_extension: Optional[str]: (Default value = "yaml")

    Returns:
        Union[DictConfig, ListConfig]: Configurable parameters in DictConfig object.
    """
    # NOTE: arguments are passed positionally and must stay in the parameter
    # order expected by anomalib.config.get_configurable_parameters.
    config = get_configurable_parameters(
        model_name, model_config_path, weight_file, openvino, config_filename, config_file_extension
    )

    # Update path to match the dataset path in the test image/runner,
    # unless the caller explicitly supplied one.
    config.dataset.path = get_dataset_path() if dataset_path is None else dataset_path

    return config
9 changes: 4 additions & 5 deletions tests/pre_merge/datasets/test_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,14 +5,15 @@
import numpy as np
import pytest

from anomalib.config import get_configurable_parameters, update_input_size_config
from anomalib.config import update_input_size_config
from anomalib.data import (
BTechDataModule,
FolderDataModule,
MVTecDataModule,
get_datamodule,
)
from anomalib.pre_processing.transforms import Denormalize, ToNumpy
from tests.helpers.config import get_test_configurable_parameters
from tests.helpers.dataset import TestDataset, get_dataset_path


Expand Down Expand Up @@ -202,11 +203,9 @@ class TestConfigToDataModule:
],
)
@TestDataset(num_train=20, num_test=10)
def test_image_size(self, input_size, effective_image_size, category="shapes", path=""):
def test_image_size(self, input_size, effective_image_size, category="shapes", path=None):
"""Test if the image size parameter works as expected."""
model_name = "stfpm"
configurable_parameters = get_configurable_parameters(model_name)
configurable_parameters.dataset.path = path
configurable_parameters = get_test_configurable_parameters(dataset_path=path, model_name="stfpm")
configurable_parameters.dataset.category = category
configurable_parameters.dataset.image_size = input_size
configurable_parameters = update_input_size_config(configurable_parameters)
Expand Down
81 changes: 0 additions & 81 deletions tests/pre_merge/datasets/test_transforms.py

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
"""

import tempfile
from pathlib import Path

import albumentations as A
import numpy as np
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@
import pytorch_lightning as pl
from pytorch_lightning.callbacks.early_stopping import EarlyStopping

from anomalib.config import get_configurable_parameters
from anomalib.utils.callbacks.openvino import OpenVINOCallback
from tests.helpers.config import get_test_configurable_parameters
from tests.pre_merge.utils.callbacks.openvino_callback.dummy_lightning_model import (
DummyLightningModule,
FakeDataModule,
Expand All @@ -15,7 +15,7 @@
def test_openvino_model_callback():
"""Tests if an optimized model is created."""

config = get_configurable_parameters(
config = get_test_configurable_parameters(
model_config_path="tests/pre_merge/utils/callbacks/openvino_callback/dummy_config.yml"
)

Expand Down
4 changes: 2 additions & 2 deletions tests/pre_merge/utils/metrics/test_adaptive_threshold.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,11 @@
import torch
from pytorch_lightning import Trainer

from anomalib.config import get_configurable_parameters
from anomalib.data import get_datamodule
from anomalib.models import get_model
from anomalib.utils.callbacks import get_callbacks
from anomalib.utils.metrics import AdaptiveThreshold
from tests.helpers.config import get_test_configurable_parameters


@pytest.mark.parametrize(
Expand All @@ -48,7 +48,7 @@ def test_non_adaptive_threshold():
Test if the non-adaptive threshold gets used in the F1 score computation when
adaptive thresholding is disabled and no normalization is used.
"""
config = get_configurable_parameters(model_config_path="anomalib/models/padim/config.yaml")
config = get_test_configurable_parameters(model_config_path="anomalib/models/padim/config.yaml")

config.model.normalization_method = "none"
config.model.threshold.adaptive = False
Expand Down