Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

benchmarker API #1259

Merged
merged 13 commits into from
Sep 27, 2023
13 changes: 13 additions & 0 deletions src/deepsparse/benchmark/api/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
78 changes: 78 additions & 0 deletions src/deepsparse/benchmark/api/benchmarker.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


from typing import Optional

from deepsparse.benchmark.api.errors import UnclearBenchmarkerModeException
from deepsparse.benchmark.benchmark_model import benchmark_model
from deepsparse.benchmark.benchmark_pipeline import benchmark_pipeline


class Benchmarker:
    """
    Benchmark API

    Input arg to `model`, `pipeline` should be one of:
     - SparseZoo stub
     - path to a model.onnx
     - path to a local folder containing a model.onnx
     - path to onnx.ModelProto

    Provide the stub/path to exactly one of
     - model to run deepsparse.benchmark
     - pipeline to run deepsparse.benchmark_pipeline
    """

    def __init__(
        self,
        model: Optional[str] = None,
        pipeline: Optional[str] = None,
    ):
        # Exactly one mode may be selected; anything else is ambiguous
        self._validate_exactly_one_mode_selected(model, pipeline)
        self.model = model
        self.pipeline = pipeline

    def __call__(self, **kwargs):
        """Run the selected benchmark, forwarding **kwargs to the runner."""
        if self.model:
            return benchmark_model(model_path=self.model, **kwargs)

        if self.pipeline:
            return benchmark_pipeline(model_path=self.pipeline, **kwargs)

    @staticmethod
    def benchmark(
        model: Optional[str] = None,
        pipeline: Optional[str] = None,
        **kwargs,
    ):
        """
        One-shot convenience wrapper: construct a Benchmarker and run it.

        :raises UnclearBenchmarkerModeException: if not exactly one of
            `model` / `pipeline` is provided (validation is in __init__)
        """
        # BUGFIX: previously `benchmarker` was only bound inside
        # `if model:` / `elif pipeline:` branches, so calling with neither
        # arg raised UnboundLocalError instead of the intended
        # UnclearBenchmarkerModeException. Passing both args straight to
        # the constructor runs its validation in every case.
        benchmarker = Benchmarker(model=model, pipeline=pipeline)
        return benchmarker(**kwargs)

    def _validate_exactly_one_mode_selected(
        self,
        *args,
    ):
        """Raise unless exactly one of the given mode args is not None."""
        selections = sum(1 for mode in args if mode is not None)
        if selections != 1:
            # BUGFIX: the original adjacent string literals were missing
            # separating spaces, producing a run-together message.
            raise UnclearBenchmarkerModeException(
                "Benchmarker only accepts "
                "one input arg for "
                "'model' to run deepsparse.benchmark or "
                "'pipeline' to run deepsparse.benchmark_pipeline"
            )
17 changes: 17 additions & 0 deletions src/deepsparse/benchmark/api/errors.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


class UnclearBenchmarkerModeException(Exception):
    """Raised when Benchmarker cannot determine which mode to run.

    Exactly one of the `model` / `pipeline` args must be provided;
    supplying zero or both makes the requested benchmark mode ambiguous.
    """

    pass
2 changes: 0 additions & 2 deletions src/deepsparse/benchmark/benchmark_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -410,7 +410,6 @@ def benchmark_model(
seconds_to_warmup=warmup_time,
num_streams=num_streams,
)

export_dict = {
"engine": str(model),
"version": __version__,
Expand All @@ -435,7 +434,6 @@ def benchmark_model(
_LOGGER.info("Saving benchmark results to JSON file at {}".format(export_path))
with open(export_path, "w") as out:
json.dump(export_dict, out, indent=2)

return export_dict


Expand Down
13 changes: 13 additions & 0 deletions tests/deepsparse/benchmark/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
13 changes: 13 additions & 0 deletions tests/deepsparse/benchmark/api/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
138 changes: 138 additions & 0 deletions tests/deepsparse/benchmark/api/test_benchmarker.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,138 @@
# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import shutil
from typing import Any, Dict, Optional

import pytest
from deepsparse.benchmark.api.benchmarker import Benchmarker
from deepsparse.benchmark.api.errors import UnclearBenchmarkerModeException
from deepsparse.benchmark.config import PipelineBenchmarkConfig
from sparsezoo import Model


@pytest.fixture(scope="function")
def get_model_path():
    """download model, return its path and delete at the end"""

    def download_model_and_return_path(stub: str, download_path: Optional[str] = None):
        model = Model(stub, download_path)
        path = model.path
        try:
            yield path
        finally:
            # BUGFIX: cleanup must live in a `finally`. Callers consume this
            # generator with next(...) only once, so when it is closed (or
            # garbage-collected) GeneratorExit is raised at the yield and any
            # code after it is skipped — the downloaded model was never
            # deleted. `finally` guarantees the cleanup runs on close.
            shutil.rmtree(path)
            assert os.path.exists(path) is False

    return download_model_and_return_path


@pytest.fixture
def benchmarker_fixture(get_model_path):
    """Build the (model_path, model_args, pipeline_args) inputs for a stub."""

    def get(
        stub: str,
        task: Optional[str] = None,
        # was annotated Optional[str]; the value is expanded with
        # **config_dict below, so it must be a mapping
        config_dict: Optional[Dict[str, Any]] = None,
        model_path: Optional[str] = None,
        model_args: Optional[Dict[str, Any]] = None,
        # was `Dict[str, Any] = None`; None is the default, so Optional
        pipeline_args: Optional[Dict[str, Any]] = None,
    ):
        # download the model once (via the get_model_path generator) unless
        # an explicit local path is supplied
        model_path = model_path or next(get_model_path(stub=stub))

        required_benchmark_model_args = model_args or {}

        required_benchmark_pipeline_args = pipeline_args or {
            "task": task,
            "config": PipelineBenchmarkConfig(**config_dict) if config_dict else None,
        }

        return (
            model_path,
            required_benchmark_model_args,
            required_benchmark_pipeline_args,
        )

    return get


def test_validate_exactly_one_mode_selected():
    """Supplying both `model` and `pipeline` must be rejected."""
    with pytest.raises(UnclearBenchmarkerModeException):
        Benchmarker(model="foo", pipeline="bar")


@pytest.mark.parametrize(
    "stub",
    [
        "zoo:cv/classification/resnet_v1-50_2x/pytorch/sparseml/imagenet/base-none",
        (
            "zoo:nlg/text_generation/codegen_mono-350m/pytorch/huggingface/"
            "bigpython_bigquery_thepile/base_quant-none"
        ),
    ],
)
def test_benchmark_model_from_benchmarker(benchmarker_fixture, stub):
    """Model-mode Benchmarker returns a non-None export dict."""
    model_path, benchmark_args, _ = benchmarker_fixture(stub=stub)
    result = Benchmarker(model=model_path)(**benchmark_args)
    assert result is not None


@pytest.mark.parametrize(
    "stub,task,config_dict",
    [
        (
            "zoo:cv/classification/resnet_v1-50_2x/pytorch/sparseml/imagenet/base-none",
            "image_classification",
            {
                "data_type": "dummy",
                "gen_sequence_length": 100,
                "input_image_shape": [500, 500, 3],
                "pipeline_kwargs": {},
                "input_schema_kwargs": {},
            },
        ),
        (
            (
                "zoo:nlg/text_generation/codegen_mono-350m/pytorch/huggingface/"
                "bigpython_bigquery_thepile/base_quant-none"
            ),
            "text_generation",
            {
                "data_type": "dummy",
                "gen_sequence_length": 100,
                "pipeline_kwargs": {},
                "input_schema_kwargs": {},
            },
        ),
    ],
)
def test_benchmark_pipeline_from_benchmarker(
    benchmarker_fixture, stub, task, config_dict
):
    """Pipeline-mode Benchmarker yields timings, total runtime, stream count."""
    model_path, _, pipeline_kwargs = benchmarker_fixture(
        stub=stub, task=task, config_dict=config_dict
    )
    # [TODO]: downstream benchmark_pipeline to accept path for text_gen.
    # Passes for ic
    pipeline_benchmarker = Benchmarker(pipeline=stub)

    timings, total_runtime, stream_count = pipeline_benchmarker(**pipeline_kwargs)
    for value in (timings, total_runtime, stream_count):
        assert value is not None
Loading