Skip to content

Commit

Permalink
Benchmarking Script (#17)
Browse files Browse the repository at this point in the history
* Initial benchmarking script

* Finish benchmarking script.

* Fix param initialization

* Add tqdm

* Fix license issue + add license to callbacks init

* Add OpenVINO throughput

* Move pull request template to .github root

* Refactor Benchmarking script.

Changes
- Normalization support for torch inferencer.
- Move cdf normalization to separate function to reduce future code duplication.

* Add tests

* First round of refactor

* 🔥 Remove merge artefacts

* 🔨 fix merge issues

* 🚚 Move components to model

* 🔥 remove artifacts from merge

* Rename logger name

* 🔨 Add wandb to pyproject config

* Address PR comments

* Fix black version

* 🔥 remove duplicate files + revert auc changes

* Fix license + minor refactor

* Fix imports

* Fix imports + seed

* Address PR comment + refactor script

* 🩹 Minor fixes

* add spawn to context

* isort

* Properly import AnomalyModule

* Fix circular import

* address pre-commit

* Rebase development

* Log csv to wandb

* 🔥 remove redundant files from merge

* 🔨 Fix linting issues

* 🔥 remove duplicate tests from development merge

* fix cyclic imports reported by pylint

* absolute import of AnomalyModule

* Moved anomaly module and dynamic module out of base.anomaly_models

* import AnomalyModule and DynamicBufferModule from anomalib.models.components.base

* reduced coverage percentage

* Move metrics calculation to benchmark.py
Rename compute functions

Co-authored-by: Ashwin Vaidya <ashwinitinvaidya@gmail.com>
Co-authored-by: Samet Akcay <samet.akcay@intel.com>
  • Loading branch information
3 people committed Feb 9, 2022
1 parent d13e452 commit c611e43
Show file tree
Hide file tree
Showing 25 changed files with 1,154 additions and 62 deletions.
8 changes: 6 additions & 2 deletions anomalib/config/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,10 @@
# See the License for the specific language governing permissions
# and limitations under the License.

from .config import get_configurable_parameters, update_nncf_config
from .config import (
get_configurable_parameters,
update_input_size_config,
update_nncf_config,
)

__all__ = ["get_configurable_parameters", "update_nncf_config"]
__all__ = ["get_configurable_parameters", "update_nncf_config", "update_input_size_config"]
15 changes: 15 additions & 0 deletions anomalib/utils/callbacks/model_loader.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,19 @@
"""Callback that loads model weights from the state dict."""

# Copyright (C) 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.

import torch
from pytorch_lightning import Callback, LightningModule

Expand Down
14 changes: 14 additions & 0 deletions anomalib/utils/callbacks/nncf_callback.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,19 @@
"""NNCF Callback."""

# Copyright (C) 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.

import os
from typing import Any, Dict, Iterator, Optional, Tuple, Union

Expand Down
15 changes: 15 additions & 0 deletions anomalib/utils/callbacks/save_to_csv.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,19 @@
"""Callback to save metrics to CSV."""

# Copyright (C) 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.

from pathlib import Path

import numpy as np
Expand Down
53 changes: 0 additions & 53 deletions anomalib/utils/hpo/config.py

This file was deleted.

32 changes: 32 additions & 0 deletions anomalib/utils/sweep/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
"""Utils for Benchmarking and Sweep."""

# Copyright (C) 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.

from .config import get_run_config, set_in_nested_config
from .helpers import (
get_meta_data,
get_openvino_throughput,
get_sweep_callbacks,
get_torch_throughput,
)

__all__ = [
"get_run_config",
"set_in_nested_config",
"get_sweep_callbacks",
"get_meta_data",
"get_openvino_throughput",
"get_torch_throughput",
]
144 changes: 144 additions & 0 deletions anomalib/utils/sweep/config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,144 @@
"""Utilities for modifying the configuration."""

# Copyright (C) 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.

import itertools
import operator
from functools import reduce
from typing import Any, Generator, List

from omegaconf import DictConfig


def flatten_sweep_params(params_dict: DictConfig) -> DictConfig:
    """Flatten the nested parameters section of the config object.

    Every leaf value is stored under a single dotted key built from the path of
    keys that leads to it. This is useful when:

    - We need to do a cartesian product of all the combinations of the configuration for grid search.
    - Save keys as headers for csv
    - Add the config to `wandb` sweep.

    Args:
        params_dict: DictConfig: The dictionary containing the hpo parameters in the original, nested, structure.

    Returns:
        flattened version of the parameter dictionary.
    """
    flattened = DictConfig({})

    def _collect_leaves(subtree: DictConfig, prefix: List[str]) -> None:
        """Depth-first walk over ``subtree``, storing each leaf under its dotted path.

        Args:
            subtree: DictConfig: config object (or sub-config) being traversed.
            prefix: List[str]: keys leading from the root to ``subtree``.
        """
        for field, value in subtree.items():
            path = prefix + [str(field)]
            if isinstance(value, DictConfig):
                _collect_leaves(value, path)
            else:
                # Leaf node: record it under the dot-joined key path.
                flattened[".".join(path)] = value

    _collect_leaves(params_dict, [])
    return flattened


def get_run_config(params_dict: DictConfig) -> Generator[DictConfig, None, None]:
    """Yield the flattened configuration for a single run.

    Args:
        params_dict (DictConfig): Configuration for grid search.

    Example:
        >>> dummy_config = DictConfig({
            "parent1":{
                "child1": ['a', 'b', 'c'],
                "child2": [1, 2, 3]
            },
            "parent2":['model1', 'model2']
        })
        >>> for run_config in get_run_config(dummy_config):
        >>>    print(run_config)
        {'parent1.child1': 'a', 'parent1.child2': 1, 'parent2': 'model1'}
        {'parent1.child1': 'a', 'parent1.child2': 1, 'parent2': 'model2'}
        {'parent1.child1': 'a', 'parent1.child2': 2, 'parent2': 'model1'}
        ...

    Yields:
        Generator[DictConfig]: Dictionary containing flattened keys and values for current run.
    """
    flattened = flatten_sweep_params(params_dict)
    # Cartesian product over the value lists yields one tuple per grid-search run;
    # pairing it back with the flattened keys produces that run's configuration.
    for values in itertools.product(*flattened.values()):
        yield DictConfig(dict(zip(flattened.keys(), values)))


def get_from_nested_config(config: DictConfig, keymap: List) -> Any:
    """Retrieve an item from a nested config object using a list of keys.

    Args:
        config: DictConfig: nested DictConfig object
        keymap: List[str]: list of keys corresponding to item that should be retrieved.

    Returns:
        Any: the value found at the end of the key path (``config`` itself when
        ``keymap`` is empty).
    """
    node = config
    for key in keymap:
        node = node[key]
    return node


def set_in_nested_config(config: DictConfig, keymap: List, value: Any):
    """Set an item in a nested config object using a list of keys.

    Args:
        config: DictConfig: nested DictConfig object
        keymap: List[str]: list of keys corresponding to item that should be set.
        value: Any: Value that should be assigned to the dictionary item at the specified location.

    Example:
        >>> dummy_config = DictConfig({
            "parent1":{
                "child1": ['a', 'b', 'c'],
                "child2": [1, 2, 3]
            },
            "parent2":['model1', 'model2']
        })
        >>> model_config = DictConfig({
            "parent1":{
                "child1": 'e',
                "child2": 4,
            },
            "parent3": False
        })
        >>> for run_config in get_run_config(dummy_config):
        >>>    print("Original model config", model_config)
        >>>    print("Suggested config", run_config)
        >>>    for param in run_config.keys():
        >>>        set_in_nested_config(model_config, param.split('.'), run_config[param])
        >>>    print("Replaced model config", model_config)
        >>>    break
        Original model config {'parent1': {'child1': 'e', 'child2': 4}, 'parent3': False}
        Suggested config {'parent1.child1': 'a', 'parent1.child2': 1, 'parent2': 'model1'}
        Replaced model config {'parent1': {'child1': 'a', 'child2': 1}, 'parent3': False, 'parent2': 'model1'}
    """
    # Walk down to the immediate parent of the target key, then assign in place.
    parent = get_from_nested_config(config, keymap[:-1])
    parent[keymap[-1]] = value
20 changes: 20 additions & 0 deletions anomalib/utils/sweep/helpers/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
"""Helpers for benchmarking and hyperparameter optimization."""

# Copyright (C) 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.

from .callbacks import get_sweep_callbacks
from .inference import get_meta_data, get_openvino_throughput, get_torch_throughput

__all__ = ["get_meta_data", "get_openvino_throughput", "get_torch_throughput", "get_sweep_callbacks"]
36 changes: 36 additions & 0 deletions anomalib/utils/sweep/helpers/callbacks.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
"""Get callbacks related to sweep."""

# Copyright (C) 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.


from typing import List

from pytorch_lightning import Callback

from anomalib.utils.callbacks.timer import TimerCallback


def get_sweep_callbacks() -> List[Callback]:
    """Gets callbacks relevant to sweep.

    Currently only the ``TimerCallback`` is needed, to measure the time taken by
    training and inference so throughput can be reported.

    Returns:
        List[Callback]: List of callbacks
    """
    # NOTE: the original docstring documented a ``config`` argument that the
    # function never accepted; the signature takes no parameters.
    callbacks: List[Callback] = [TimerCallback()]

    return callbacks
Loading

0 comments on commit c611e43

Please sign in to comment.