🐞 Fix inferencer in Gradio #332

Merged · 3 commits · May 25, 2022
29 changes: 11 additions & 18 deletions tools/inference_gradio.py
@@ -6,7 +6,7 @@
 from argparse import ArgumentParser, Namespace
 from importlib import import_module
 from pathlib import Path
-from typing import Optional, Tuple, Union
+from typing import Optional, Tuple

 import gradio as gr
 import gradio.inputs
@@ -47,16 +47,15 @@ def get_args() -> Namespace:

     Example:

-        >>> python tools/inference_gradio.py --model ./anomalib/models/padim/config.yaml --weight_path ./results/padim/mvtec/bottle/weights/model.ckpt
-        >>> python tools/inference_gradio.py --model padim --weight_path ./results/padim/mvtec/bottle/weights/model.ckpt
+        >>> python tools/inference_gradio.py \
+                --config ./anomalib/models/padim/config.yaml \
+                --weight_path ./results/padim/mvtec/bottle/weights/model.ckpt

     Returns:
         Namespace: List of arguments.
     """
     parser = ArgumentParser()
-    parser.add_argument(
-        "--model", type=str, required=True, help="Can be either the model name or the path to a model config file"
-    )
+    parser.add_argument("--config", type=Path, required=True, help="Path to a model config file")
     parser.add_argument("--weight_path", type=Path, required=True, help="Path to a model weights")
     parser.add_argument("--meta_data", type=Path, required=False, help="Path to JSON file containing the metadata.")
Contributor:

There is a bit of inconsistency in the naming; in fact, this is the case for the other entrypoints as well. I think we should stick to one of the following (a sketch of the first option follows the list):

- config, weights, meta_data
- config_path, weight_path, meta_data_path

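A minimal sketch of what the first option could look like, assuming the same ArgumentParser setup used in this file (the names and help strings below are the reviewer's suggestion, not code from the PR):

```python
from argparse import ArgumentParser
from pathlib import Path

# Hypothetical: consistent short argument names across all entrypoints (option 1 above).
parser = ArgumentParser()
parser.add_argument("--config", type=Path, required=True, help="Path to a model config file")
parser.add_argument("--weights", type=Path, required=True, help="Path to the model weights")
parser.add_argument("--meta_data", type=Path, required=False, help="Path to a JSON metadata file")
```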
Collaborator (Author):

That's alright. The whole point of the review is to ensure code quality.


@@ -72,14 +71,10 @@ def get_args() -> Namespace:

     args = parser.parse_args()

-    # Model config is passed as the input argument else assume that model name is passed to model
-    if args.model.endswith(".yaml"):
-        args.model = Path(args.model)
-
     return args


-def get_inferencer(model: Union[Path, str], weight_path: Path, meta_data_path: Optional[Path] = None) -> Inferencer:
+def get_inferencer(config_path: Path, weight_path: Path, meta_data_path: Optional[Path] = None) -> Inferencer:
     """Parse args and open inferencer.

     Args:
@@ -93,22 +88,20 @@ def get_inferencer(config_path: Path, weight_path: Path, meta_data_path: Optional[Path] = None) -> Inferencer:

     Returns:
         Inferencer: Torch or OpenVINO inferencer.
     """
-    if isinstance(model, Path):
-        config = get_configurable_parameters(config_path=model)
-    else:
-        config = get_configurable_parameters(model_name=model)
+    config = get_configurable_parameters(config_path=config_path)

     # Get the inferencer. We use the .ckpt extension for Torch models and
     # (.onnx, .bin, .xml) for the OpenVINO models.
     extension = weight_path.suffix
     inferencer: Inferencer
     if extension in (".ckpt",):
         module = import_module("anomalib.deploy.inferencers.torch")
-        TorchInferencer = getattr(module, "TorchInferencer")  # pylint: disable=invalid-name
+        TorchInferencer = getattr(module, "TorchInferencer")
         inferencer = TorchInferencer(config=config, model_source=weight_path, meta_data_path=meta_data_path)

     elif extension in (".onnx", ".bin", ".xml"):
         module = import_module("anomalib.deploy.inferencers.openvino")
-        OpenVINOInferencer = getattr(module, "OpenVINOInferencer")  # pylint: disable=invalid-name
+        OpenVINOInferencer = getattr(module, "OpenVINOInferencer")
         inferencer = OpenVINOInferencer(config=config, path=weight_path, meta_data_path=meta_data_path)

     else:
@@ -123,7 +116,7 @@

 if __name__ == "__main__":
     session_args = get_args()

-    gradio_inferencer = get_inferencer(session_args.model, session_args.weight_path, session_args.meta_data)
+    gradio_inferencer = get_inferencer(session_args.config, session_args.weight_path, session_args.meta_data)

     interface = gr.Interface(
         fn=lambda image, threshold: infer(image, gradio_inferencer, threshold),
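For context, after this change the choice between the two backends is driven entirely by the suffix of the weights file. A minimal usage sketch, assuming it runs in the same module as `get_inferencer` above (the .ckpt path is taken from the docstring example; the OpenVINO path is illustrative, not from the PR):

```python
from pathlib import Path

# .ckpt suffix -> TorchInferencer is imported and constructed.
torch_inferencer = get_inferencer(
    config_path=Path("./anomalib/models/padim/config.yaml"),
    weight_path=Path("./results/padim/mvtec/bottle/weights/model.ckpt"),
)

# .onnx / .bin / .xml suffix -> OpenVINOInferencer is used instead.
# (Hypothetical path; the actual export location depends on your config.)
openvino_inferencer = get_inferencer(
    config_path=Path("./anomalib/models/padim/config.yaml"),
    weight_path=Path("./results/padim/mvtec/bottle/openvino/model.xml"),
)
```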