[Export Refactor][Image Classification] export_model function #1883

Merged

Changes from 20 commits
2 changes: 1 addition & 1 deletion src/sparseml/core/session.py
@@ -100,7 +100,7 @@ def pre_initialize_structure(
This will run the pre-initialize structure method for each modifier in the
session's lifecycle. This will also set the session's state to the
pre-initialized state. Takes care of cases when the model(s) structure
has been previosuly modified by a modifier.
has been previously modified by a modifier.

:param model: the model to pre-initialize the structure for
:param recipe: the recipe to use for the sparsification, can be a path to a
13 changes: 13 additions & 0 deletions src/sparseml/export/__init__.py
@@ -0,0 +1,13 @@
# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
77 changes: 20 additions & 57 deletions src/sparseml/export.py → src/sparseml/export/export.py
@@ -14,56 +14,25 @@

import logging
from pathlib import Path
from typing import Any, Callable, List, Optional, Union

from pydantic import BaseModel, Field
from typing import Any, List, Optional, Union

from sparseml.exporters import ExportTargets
from sparseml.integration_helper_functions import (
IntegrationHelperFunctions,
infer_integration,
)
from sparseml.pytorch.opset import TORCH_DEFAULT_ONNX_OPSET
from sparsezoo.utils.registry import RegistryMixin


_LOGGER = logging.getLogger(__name__)
AVAILABLE_DEPLOYMENT_TARGETS = ["deepsparse", "onnxruntime"]


class IntegrationHelperFunctions(BaseModel, RegistryMixin):
"""
Registry that maps integration names to helper functions
for creation/export/manipulation of models for a specific
integration.
"""

create_model: Optional[Callable] = Field(
description="A function that creates a (sparse) "
"PyTorch model from a source path."
)
create_dummy_input: Optional[Callable] = Field(
description="A function that creates a dummy input "
"given a (sparse) PyTorch model."
)
export_model: Optional[Callable] = Field(
description="A function that exports a (sparse) PyTorch "
"model to an ONNX format appropriate for a "
"deployment target."
)
apply_optimizations: Optional[Callable] = Field(
description="A function that takes a set of "
"optimizations and applies them to an ONNX model."
)
export_sample_inputs_outputs: Optional[Callable] = Field(
description="A function that exports input/output samples given "
"a (sparse) PyTorch model."
)
create_deployment_folder: Optional[Callable] = Field(
description="A function that creates a "
"deployment folder for the exporter ONNX model"
"with the appropriate structure."
)
AVAILABLE_DEPLOYMENT_TARGETS = [target.value for target in ExportTargets]
ONNX_MODEL_NAME = "model.onnx"


def export(
source_path: Union[Path, str],
target_path: Union[Path, str],
model_onnx_name: str = ONNX_MODEL_NAME,
deployment_target: str = "deepsparse",
integration: Optional[str] = None,
sample_data: Optional[Any] = None,
@@ -91,6 +60,8 @@ def export(

:param source_path: The path to the PyTorch model to export.
:param target_path: The path to save the exported model to.
:param model_onnx_name: The name of the exported model.
Defaults to ONNX_MODEL_NAME.
:param deployment_target: The deployment target to export
the model to. Defaults to 'deepsparse'.
:param integration: The name of the integration to use for
@@ -132,17 +103,23 @@ def export(
IntegrationHelperFunctions.load_from_registry(integration)
)

model = helper_functions.create_model(source_path, device)
# for now, this code is not runnable; it serves as a blueprint
model, auxiliary_items = helper_functions.create_model(
source_path, **kwargs # noqa: F821
)
sample_data = (
helper_functions.create_dummy_input(model, batch_size)
helper_functions.create_dummy_input(**auxiliary_items)
if sample_data is None
else sample_data
)
onnx_file_path = helper_functions.export_model(
model, sample_data, target_path, deployment_target, opset, single_graph_file
)

helper_functions.apply_optimizations(onnx_file_path, graph_optimizations)
helper_functions.apply_optimizations(
onnx_file_path,
graph_optimizations,
)

if export_sample_inputs_outputs:
helper_functions.export_sample_inputs_outputs(model, target_path)
@@ -166,20 +143,6 @@ def export(
)


def infer_integration(source_path: Union[Path, str]) -> str:
"""
Infer the integration to use for exporting the model from the source_path.
For example:
- for transformers model the integration
can be inferred from `config.json`
- for computer vision, the integration
can be inferred from the model architecture (`arch_key`)
:param source_path: The path to the PyTorch model to export.
:return: The name of the integration to use for exporting the model.
"""
raise NotImplementedError


def validate_correctness(deployment_path: Union[Path, str]):
"""
Validate the correctness of the exported model.
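For reference, here is a minimal usage sketch of the refactored entrypoint. It only uses the arguments visible in this diff, the paths are placeholders, and — per the blueprint comment above — the end-to-end flow is not expected to run yet:

```python
from sparseml.export.export import export

# Placeholder paths; deployment_target must be one of
# AVAILABLE_DEPLOYMENT_TARGETS (["deepsparse", "onnx"]).
export(
    source_path="path/to/image_classification_checkpoint",
    target_path="path/to/deployment_dir",
    model_onnx_name="model.onnx",         # default: ONNX_MODEL_NAME
    deployment_target="deepsparse",
    integration="image-classification",   # optional; inferred when omitted
)
```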
11 changes: 11 additions & 0 deletions src/sparseml/exporters/__init__.py
@@ -11,3 +11,14 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from enum import Enum


class ExportTargets(Enum):
"""
Holds the names of the supported export targets
"""

deepsparse = "deepsparse"
onnx = "onnx"
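As a small sanity check of the relationship introduced above, `AVAILABLE_DEPLOYMENT_TARGETS` in `export.py` is derived directly from this enum; a sketch:

```python
from sparseml.exporters import ExportTargets

# Mirrors AVAILABLE_DEPLOYMENT_TARGETS in src/sparseml/export/export.py
available_targets = [target.value for target in ExportTargets]
assert available_targets == ["deepsparse", "onnx"]
```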
110 changes: 110 additions & 0 deletions src/sparseml/integration_helper_functions.py
@@ -0,0 +1,110 @@
# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from enum import Enum
from pathlib import Path
from typing import Any, Callable, Dict, Optional, Tuple, Union

from pydantic import BaseModel, Field

from sparsezoo.utils.registry import RegistryMixin


__all__ = ["IntegrationHelperFunctions", "infer_integration"]


class Integrations(Enum):
"""
Holds the names of the available integrations.
"""

image_classification = "image-classification"


class IntegrationHelperFunctions(RegistryMixin, BaseModel):
"""
Registry that maps names to helper functions
for creation/export/manipulation of models for a specific
integration.
"""

create_model: Optional[
Callable[
Tuple[Union[str, Path], Optional[Dict[str, Any]]],
Tuple["torch.nn.Module", Dict[str, Any]], # noqa F821
]
] = Field(
description="A function that takes: "
"- a source path to a PyTorch model "
"- (optionally) a dictionary of additional arguments"
"and returns: "
"- a (sparse) PyTorch model "
"- (optionally) a dictionary of additional arguments"
)
create_dummy_input: Optional[
Callable[..., "torch.Tensor"] # noqa F821
] = Field(
description="A function that takes: "
"- a dictionary of arguments"
"and returns: "
"- a dummy input for the model (a torch.Tensor) "
)
export: Optional[Callable[..., str]] = Field(
description="A function that takes: "
" - a (sparse) PyTorch model "
" - sample input data "
" - the path to save the exported model to "
" - the name to save the exported ONNX model as "
" - the deployment target to export to "
" - the opset to use for the export "
" - (optionally) a dictionary of additional arguments"
"and returns nothing"
)
apply_optimizations: Optional[Callable] = Field(
description="A function that takes a set of "
"optimizations and applies them to an ONNX model."
)
export_sample_inputs_outputs: Optional[Callable] = Field(
description="A function that exports input/output samples given "
"a (sparse) PyTorch model."
)
create_deployment_folder: Optional[Callable] = Field(
description="A function that creates a "
"deployment folder for the exporter ONNX model"
"with the appropriate structure."
)


def infer_integration(source_path: Union[Path, str]) -> str:
"""
Infer the integration to use for exporting the model from the source_path.

:param source_path: The path to the PyTorch model to export.
:return: The name of the integration to use for exporting the model.
"""
from sparseml.pytorch.image_classification.utils.helpers import (
is_image_classification_model,
)

if is_image_classification_model(source_path):
# import to register the image_classification integration helper functions
import sparseml.pytorch.image_classification.integration_helper_functions # noqa F401

return Integrations.image_classification.value
else:
raise ValueError(
f"Could not infer integration from source_path: {source_path}."
f"Please specify an argument `integration` from one of"
f"the available integrations: {list(Integrations)}."
)
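To illustrate how `infer_integration` and the registry compose, here is a minimal sketch mirroring the blueprint in `export.py` above (not expected to run end to end yet; the checkpoint path is a placeholder):

```python
from sparseml.integration_helper_functions import (
    IntegrationHelperFunctions,
    infer_integration,
)

source_path = "path/to/image_classification_checkpoint"  # placeholder

# e.g. "image-classification"; inferring also registers that
# integration's helper functions with the registry
integration = infer_integration(source_path)
helper_functions = IntegrationHelperFunctions.load_from_registry(integration)

model, auxiliary_items = helper_functions.create_model(source_path)
dummy_input = helper_functions.create_dummy_input(**auxiliary_items)
```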
@@ -0,0 +1,75 @@
# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from pathlib import Path
from typing import Any, Callable, Dict, Optional, Tuple, Union

import torch
from pydantic import Field

from sparseml.pytorch.image_classification.utils.helpers import export_model
from sparseml.integration_helper_functions import (
IntegrationHelperFunctions,
Integrations,
)
from sparseml.pytorch.image_classification.utils.helpers import (
create_model as create_image_classification_model,
)


def create_model(
source_path: Union[Path, str], **kwargs
) -> Tuple[torch.nn.Module, Dict[str, Any]]:
"""
A contract to create a model from a source path

:param source_path: The path to the model
:param kwargs: Additional kwargs to pass to the model creation function
:return: A tuple of the
- torch model
- additional dictionary of useful objects created during model creation
"""
model, *_, validation_loader = create_image_classification_model(
checkpoint_path=source_path, **kwargs
)
return model, dict(validation_loader=validation_loader)


def create_dummy_input(
validation_loader: Optional[torch.utils.data.DataLoader] = None,
image_size: Optional[int] = 224,
) -> torch.Tensor:
"""
A contract to create a dummy input for a model

:param validation_loader: The validation loader to get a batch from.
If None, a fake batch will be created
:param image_size: The image size to use for the dummy input. Defaults to 224
:return: The dummy input as a torch tensor
"""

if not validation_loader:
# create fake data for export
validation_loader = [[torch.randn(1, 3, image_size, image_size)]]
return next(iter(validation_loader))[0]


@IntegrationHelperFunctions.register(name=Integrations.image_classification.value)
class ImageClassification(IntegrationHelperFunctions):

create_model: Callable[..., Tuple[torch.nn.Module, Dict[str, Any]]] = Field(
default=create_model
)
create_dummy_input: Callable[..., torch.Tensor] = Field(default=create_dummy_input)
export: Callable[..., str] = Field(default=export_model)
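For context, a hypothetical sketch of how a future integration could register its own helpers via the same pattern; the name "my-integration" and both helper functions are illustrative placeholders, not part of this PR:

```python
from typing import Any, Callable, Dict, Tuple

import torch
from pydantic import Field

from sparseml.integration_helper_functions import IntegrationHelperFunctions


def create_model(source_path, **kwargs) -> Tuple[torch.nn.Module, Dict[str, Any]]:
    # placeholder: a real integration would load a checkpoint from source_path
    model = torch.nn.Linear(8, 2)
    return model, {}


def create_dummy_input(**kwargs) -> torch.Tensor:
    # placeholder: shape must match what the real model expects
    return torch.randn(1, 8)


@IntegrationHelperFunctions.register(name="my-integration")
class MyIntegration(IntegrationHelperFunctions):

    create_model: Callable[..., Tuple[torch.nn.Module, Dict[str, Any]]] = Field(
        default=create_model
    )
    create_dummy_input: Callable[..., torch.Tensor] = Field(default=create_dummy_input)
```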