🚀 CI: Nightly Build #66

Merged: 18 commits, Feb 7, 2022
@@ -11,7 +11,7 @@
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] This change requires a documentation update

-# Checklist:
+## Checklist

- [ ] My code follows the [pre-commit style and check guidelines](https://openvinotoolkit.github.io/anomalib/guides/using_pre_commit.html#pre-commit-hooks) of this project.
- [ ] I have performed a self-review of my code
30 changes: 30 additions & 0 deletions .github/workflows/nightly.yml
@@ -0,0 +1,30 @@
name: Nightly-regression Test

on:
workflow_dispatch: # run on request (no need for PR)
schedule:
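    # "0 0 * * *" = once a day at 00:00 UTC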
- cron: "0 0 * * *"

jobs:
Tox:
runs-on: [self-hosted, linux, x64]
strategy:
max-parallel: 1
if: github.ref == 'refs/heads/development'
steps:
- name: Print GPU status
run: nvidia-smi
- name: CHECKOUT REPOSITORY
uses: actions/checkout@v2
- name: Install Tox
run: pip install tox
- name: Coverage
run: |
export ANOMALIB_DATASET_PATH=/media/data1/datasets/MVTec
export CUDA_VISIBLE_DEVICES=2
tox -e nightly
- name: Upload coverage result
uses: actions/upload-artifact@v2
with:
name: coverage
path: .tox/coverage.xml
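For reference, a minimal sketch of reproducing the nightly "Coverage" step on a local machine. The dataset path and GPU index below are placeholders, not values mandated by the workflow:

```python
import os
import subprocess

# Mirror the environment variables the workflow exports before invoking tox.
os.environ["ANOMALIB_DATASET_PATH"] = "/path/to/MVTec"  # assumption: your local MVTec copy
os.environ["CUDA_VISIBLE_DEVICES"] = "0"                # assumption: first visible GPU

# Run the same tox environment as the nightly job; requires `pip install tox`.
subprocess.run(["tox", "-e", "nightly"], check=True)
```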
@@ -24,7 +24,7 @@ jobs:
run: |
export ANOMALIB_DATASET_PATH=/media/data1/datasets/MVTec
export CUDA_VISIBLE_DEVICES=3
-          tox -e coverage
+          tox -e pre_merge
- name: Upload coverage result
uses: actions/upload-artifact@v2
with:
Expand Down
3 changes: 2 additions & 1 deletion anomalib/loggers/wandb.py
@@ -17,11 +17,12 @@
from typing import Any, List, Optional, Union

import numpy as np
-import wandb
from matplotlib.figure import Figure
from pytorch_lightning.loggers.wandb import WandbLogger
from pytorch_lightning.utilities import rank_zero_only

+import wandb

from .base import ImageLoggerBase


15 changes: 15 additions & 0 deletions anomalib/utils/hpo/__init__.py
@@ -0,0 +1,15 @@
"""Utils to help in HPO search."""

# Copyright (C) 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.
53 changes: 53 additions & 0 deletions anomalib/utils/hpo/config.py
@@ -0,0 +1,53 @@
"""Utils to update configuration files."""

# Copyright (C) 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.

from typing import List

from omegaconf import DictConfig


def flatten_sweep_params(params_dict: DictConfig) -> DictConfig:
"""Flatten the nested parameters section of the config object.

Args:
        params_dict (DictConfig): Dictionary containing the HPO parameters in their original, nested structure.

Returns:
        DictConfig: Flattened version of the parameter dictionary.
"""

def process_params(nested_params: DictConfig, keys: List[str], flattened_params: DictConfig):
"""Flatten nested dictionary.

Recursive helper function that traverses the nested config object and stores the leaf nodes in a flattened
dictionary.

Args:
            nested_params (DictConfig): Config object containing the original parameters.
            keys (List[str]): Keys leading to the current location in the config.
            flattened_params (DictConfig): Dictionary in which the flattened parameters are stored.
"""
for name, cfg in nested_params.items():
if isinstance(cfg, DictConfig):
process_params(cfg, keys + [str(name)], flattened_params)
else:
key = ".".join(keys + [str(name)])
flattened_params[key] = cfg

flattened_params_dict = DictConfig({})
process_params(params_dict, [], flattened_params_dict)

return flattened_params_dict
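A quick usage sketch of `flatten_sweep_params`; the sweep parameters below are made up for illustration:

```python
from omegaconf import DictConfig

nested = DictConfig(
    {
        "model": {"lr": {"min": 0.0001, "max": 0.01}},
        "dataset": {"category": "bottle"},
    }
)
flat = flatten_sweep_params(nested)
# Leaf values become reachable via dotted keys:
# model.lr.min -> 0.0001, model.lr.max -> 0.01, dataset.category -> 'bottle'
```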
1 change: 1 addition & 0 deletions pyproject.toml
@@ -4,6 +4,7 @@ build-backend = "setuptools.build_meta"

[tool.isort]
profile = "black"
known_first_party = "wandb"
sections = ['FUTURE', 'STDLIB', 'THIRDPARTY', 'FIRSTPARTY', 'LOCALFOLDER']

[tool.black]
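The `known_first_party = "wandb"` pin makes isort's placement of `wandb` deterministic (one common reason for pinning, though an assumption here, is that isort's classification of `wandb` can vary between environments, e.g. when a local `wandb/` run directory exists). A minimal sketch of the ordering this config produces, mirroring the `anomalib/loggers/wandb.py` change above:

```python
# THIRDPARTY section
import numpy as np
from pytorch_lightning.loggers.wandb import WandbLogger

# FIRSTPARTY section: wandb sorts here because of `known_first_party`
import wandb

# LOCALFOLDER section
from .base import ImageLoggerBase
```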
2 changes: 1 addition & 1 deletion requirements/base.txt
@@ -7,7 +7,7 @@ kornia==0.5.6
lxml==4.6.5
matplotlib==3.4.3
networkx~=2.5
-nncf==2.0.0
+nncf==2.1.0
numpy~=1.19.5
omegaconf==2.1.1
pillow==9.0.0
9 changes: 9 additions & 0 deletions tests/helpers/dataset.py
@@ -82,6 +82,7 @@ def __init__(
test_shapes: List[str] = ["hexagon", "star"],
path: Union[str, Path] = "./datasets/MVTec",
use_mvtec: bool = False,
seed: int = 0,
) -> None:
"""Creates a context for Generating Dummy Dataset. Useful for wrapping test functions.
NOTE: for MVTec dataset it does not return a category.
@@ -97,6 +98,7 @@
            test_shapes (List[str], optional): List of anomalous shapes. Defaults to ["hexagon", "star"].
path (Union[str, Path], optional): Path to MVTec dataset. Defaults to "./datasets/MVTec".
use_mvtec (bool, optional): Use MVTec dataset or dummy dataset. Defaults to False.
seed (int, optional): Fixes seed if any number greater than 0 is provided. 0 means no seed. Defaults to 0.

Example:
>>> @TestDataset
@@ -112,6 +114,7 @@
self.test_shapes = test_shapes
self.path = path
self.use_mvtec = use_mvtec
self.seed = seed

def __call__(self, func):
@wraps(func)
@@ -129,6 +132,7 @@ def inner(*args, **kwds):
train_shapes=self.train_shapes,
test_shapes=self.test_shapes,
max_size=self.max_size,
seed=self.seed,
) as dataset_path:
kwds["category"] = "shapes"
return func(*args, path=dataset_path, **kwds)
@@ -150,6 +154,7 @@ class GeneratedDummyDataset(ContextDecorator):
max_size (Optional[int], optional): Maximum size of the test shapes. Defaults to 10.
        train_shapes (List[str], optional): List of good shapes. Defaults to ["triangle", "rectangle"].
        test_shapes (List[str], optional): List of anomalous shapes. Defaults to ["star", "hexagon"].
seed (int, optional): Fixes seed if any number greater than 0 is provided. 0 means no seed. Defaults to 0.
"""

def __init__(
@@ -161,6 +166,7 @@ def __init__(
max_size: Optional[int] = 10,
train_shapes: List[str] = ["triangle", "rectangle"],
test_shapes: List[str] = ["star", "hexagon"],
seed: int = 0,
) -> None:
self.root_dir = mkdtemp()
self.num_train = num_train
@@ -170,6 +176,7 @@ def __init__(
self.image_height = img_height
self.image_width = img_width
self.max_size = max_size
self.seed = seed

def _generate_dataset(self):
"""Generates dummy dataset in a temporary directory using the same
@@ -226,6 +233,8 @@ def _generate_dataset(self):

def __enter__(self):
"""Creates the dataset in temp folder."""
if self.seed > 0:
np.random.seed(self.seed)
self._generate_dataset()
return self.root_dir

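A usage sketch of the new `seed` argument; the test body and parameter values are illustrative, and constructor arguments not shown in this diff are assumed to keep their defaults:

```python
from tests.helpers.dataset import GeneratedDummyDataset, TestDataset

@TestDataset(use_mvtec=False, seed=42)  # seed > 0 makes the generated images deterministic
def test_metrics(path: str = "", category: str = "shapes"):
    assert path  # the decorator injects the temporary dataset location

# The context-manager form, e.g. for one-off fixtures:
with GeneratedDummyDataset(seed=42) as data_root:
    print(data_root)  # temporary directory containing the generated dataset
```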
145 changes: 145 additions & 0 deletions tests/helpers/model.py
@@ -0,0 +1,145 @@
"""Common helpers for both nightly and pre-merge model tests."""

# Copyright (C) 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.

import os
from typing import Dict, Optional, Tuple, Union

import numpy as np
from omegaconf import DictConfig, ListConfig
from pytorch_lightning import LightningDataModule, Trainer
from pytorch_lightning.callbacks import ModelCheckpoint

from anomalib.config import get_configurable_parameters, update_nncf_config
from anomalib.core.callbacks import get_callbacks
from anomalib.core.callbacks.visualizer_callback import VisualizerCallback
from anomalib.core.model.anomaly_module import AnomalyModule
from anomalib.data import get_datamodule
from anomalib.models import get_model


def setup_model_train(
model_name: str,
dataset_path: str,
project_path: str,
nncf: bool,
category: str,
    score_type: Optional[str] = None,
weight_file: str = "weights/last.ckpt",
fast_run: bool = False,
) -> Tuple[Union[DictConfig, ListConfig], LightningDataModule, AnomalyModule, Trainer]:
"""Train the model based on the parameters passed.

Args:
model_name (str): Name of the model to train.
dataset_path (str): Location of the dataset.
project_path (str): Path to temporary project folder.
nncf (bool): Add nncf callback.
category (str): Category to train on.
score_type (str, optional): Only used for DFM. Defaults to None.
weight_file (str, optional): Path to weight file.
        fast_run (bool, optional): If set to True, the model trains for only one epoch. We train for one epoch as
            this ensures that both anomalous and non-anomalous images are present in the validation step.

Returns:
        Tuple[Union[DictConfig, ListConfig], LightningDataModule, AnomalyModule, Trainer]: config, datamodule, trained model, trainer
"""
config = get_configurable_parameters(model_name=model_name)
if score_type is not None:
config.model.score_type = score_type
config.project.seed = 42
config.dataset.category = category
config.dataset.path = dataset_path
config.project.log_images_to = []

# If weight file is empty, remove the key from config
if "weight_file" in config.model.keys() and weight_file == "":
config.model.pop("weight_file")
else:
config.model.weight_file = weight_file

if nncf:
config.optimization.nncf.apply = True
config = update_nncf_config(config)
config.init_weights = None

    # reassign project path as config is updated in `update_nncf_config`
config.project.path = project_path

datamodule = get_datamodule(config)
model = get_model(config)

callbacks = get_callbacks(config)

# Force model checkpoint to create checkpoint after first epoch
    if fast_run:
for index, callback in enumerate(callbacks):
if isinstance(callback, ModelCheckpoint):
callbacks.pop(index)
break
model_checkpoint = ModelCheckpoint(
dirpath=os.path.join(config.project.path, "weights"),
filename="last",
monitor=None,
mode="max",
save_last=True,
auto_insert_metric_name=False,
)
callbacks.append(model_checkpoint)

for index, callback in enumerate(callbacks):
if isinstance(callback, VisualizerCallback):
callbacks.pop(index)
break

# Train the model.
if fast_run:
config.trainer.max_epochs = 1

trainer = Trainer(callbacks=callbacks, **config.trainer)
trainer.fit(model=model, datamodule=datamodule)
return config, datamodule, model, trainer


def model_load_test(config: Union[DictConfig, ListConfig], datamodule: LightningDataModule, results: Dict):
"""Create a new model based on the weights specified in config.

Args:
        config (Union[DictConfig, ListConfig]): Model config.
        datamodule (LightningDataModule): Datamodule used to test the loaded model.
results (Dict): Results from original model.

"""
loaded_model = get_model(config) # get new model

callbacks = get_callbacks(config)

for index, callback in enumerate(callbacks):
# Remove visualizer callback as saving results takes time
if isinstance(callback, VisualizerCallback):
callbacks.pop(index)
break

# create new trainer object with LoadModel callback (assumes it is present)
trainer = Trainer(callbacks=callbacks, **config.trainer)
# Assumes the new model has LoadModel callback and the old one had ModelCheckpoint callback
new_results = trainer.test(model=loaded_model, datamodule=datamodule)[0]
assert np.isclose(
results["image_AUROC"], new_results["image_AUROC"]
), "Loaded model does not yield close performance results"
if config.dataset.task == "segmentation":
assert np.isclose(
results["pixel_AUROC"], new_results["pixel_AUROC"]
), "Loaded model does not yield close performance results"