Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

By default avoiding generating files in temp directory #1058

Merged
merged 15 commits into from
Jun 13, 2023
Merged
3 changes: 2 additions & 1 deletion src/deepsparse/license.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@
def add_deepsparse_license(token_or_path):
candidate_license_file_path = token_or_path
if not os.path.exists(token_or_path):
# write raw token to temp file for validadation
# write raw token to temp file for validation
candidate_license_tempfile = NamedTemporaryFile()
candidate_license_file_path = candidate_license_tempfile.name
with open(candidate_license_file_path, "w") as token_file:
Expand All @@ -70,6 +70,7 @@ def add_deepsparse_license(token_or_path):
license_file_path = _get_license_file_path()
shutil.copy(candidate_license_file_path, license_file_path)
_LOGGER.info(f"DeepSparse license file written to {license_file_path}")
os.remove(candidate_license_file_path)
dbogunowicz marked this conversation as resolved.
Show resolved Hide resolved

# re-validate and print message now that license is copied to expected location
validate_license()
Expand Down
1 change: 1 addition & 0 deletions src/deepsparse/server/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -228,6 +228,7 @@ def main(
loggers={},
)

# saving yaml config to temporary directory
with TemporaryDirectory() as tmp_dir:
config_path = os.path.join(tmp_dir, "server-config.yaml")
with open(config_path, "w") as fp:
Expand Down
12 changes: 11 additions & 1 deletion src/deepsparse/transformers/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,6 +136,7 @@ def overwrite_transformer_onnx_model_inputs(
batch_size: int = 1,
max_length: int = 128,
output_path: Optional[str] = None,
inplace: bool = True,
) -> Tuple[Optional[str], List[str], Optional[NamedTemporaryFile]]:
"""
Overrides an ONNX model's inputs to have the given batch size and sequence lengths.
Expand All @@ -148,12 +149,21 @@ def overwrite_transformer_onnx_model_inputs(
:param output_path: if provided, the model will be saved to the given path,
otherwise, the model will be saved to a named temporary file that will
be deleted after the program exits
:param inplace: if True, the model will be modified in place, otherwise
a copy of the model will be saved to a temporary file
:return: if no output path, a tuple of the saved path to the model, list of
model input names, and reference to the tempfile object will be returned
otherwise, only the model input names will be returned
"""

if inplace and output_path is None:
raise ValueError(
"Cannot specify both inplace=True and output_path. If inplace=True, "
"the model will be modified in place (the returned path will be identical"
"to the input path specified in argument `path`)"
)
# overwrite input shapes
model = onnx.load(path)
model = onnx.load(path, load_external_data=not inplace)
initializer_input_names = set([node.name for node in model.graph.initializer])
external_inputs = [
inp for inp in model.graph.input if inp.name not in initializer_input_names
Expand Down
55 changes: 41 additions & 14 deletions src/deepsparse/utils/onnx.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
from onnx.mapping import TENSOR_TYPE_TO_NP_TYPE

from deepsparse.utils.extractor import Extractor
from sparsezoo.utils import save_onnx, validate_onnx
from sparsezoo.utils import onnx_includes_external_data, save_onnx, validate_onnx


try:
Expand Down Expand Up @@ -53,13 +53,21 @@


@contextlib.contextmanager
def save_onnx_to_temp_files(model: Model, with_external_data=True) -> str:
def save_onnx_to_temp_files(model: onnx.ModelProto, with_external_data=False) -> str:
"""
Save model to a temporary file. Works for models with external data.

:param model: The onnx model to save to temporary directory
:param with_external_data: Whether to save external data to a separate file
"""
if not onnx_includes_external_data(model) and with_external_data:
raise ValueError(
"Model does not include external data, it only includes the model graph."
"Cannot save its external data to separate a file."
"Set argument `with_external_data`=False"
)
shaped_model = tempfile.NamedTemporaryFile(mode="w", delete=False)

if with_external_data:
external_data = os.path.join(
tempfile.tempdir, next(tempfile._get_candidate_names())
Expand Down Expand Up @@ -195,16 +203,27 @@ def generate_random_inputs(


def override_onnx_batch_size(
onnx_filepath: str, batch_size: int, inplace: bool = False
onnx_filepath: str,
batch_size: int,
inplace: bool = True,
) -> str:
"""
Rewrite batch sizes of ONNX model, saving the modified model and returning its path
:param onnx_filepath: File path to ONNX model

:param onnx_filepath: File path to ONNX model. If the graph is to be
modified in-place, only the model graph will be loaded and modified.
Otherwise, the entire model will be loaded and modified, so that
external data are saved along the model graph.
:param batch_size: Override for the batch size dimension
:param inplace: If True, overwrite the original model file
:return: File path to modified ONNX model
:param inplace: If True, overwrite the original model file.
Else save the modified model to a temporary file.
:return: File path to modified ONNX model.
If inplace is True,
the modified model will be saved to the same path as the original
model. Else the modified model will be saved to a
temporary file.
"""
model = onnx.load(onnx_filepath, load_external_data=False)
model = onnx.load(onnx_filepath, load_external_data=not inplace)
all_inputs = model.graph.input
initializer_input_names = [node.name for node in model.graph.initializer]
external_inputs = [
Expand All @@ -215,30 +234,38 @@ def override_onnx_batch_size(

# Save modified model, this will be cleaned up when context is exited
if inplace:
onnx.save(model, onnx_filepath)
save_onnx(model, onnx_filepath)
return onnx_filepath
else:
# Save modified model, this will be cleaned up when context is exited
return save_onnx_to_temp_files(model, with_external_data=False)
return save_onnx_to_temp_files(model, with_external_data=not inplace)


def override_onnx_input_shapes(
onnx_filepath: str,
input_shapes: Union[List[int], List[List[int]]],
inplace: bool = False,
inplace: bool = True,
) -> str:
"""
Rewrite input shapes of ONNX model, saving the modified model and returning its path
:param onnx_filepath: File path to ONNX model

:param onnx_filepath: File path to ONNX model. If the graph is to be
modified in-place, only the model graph will be loaded and modified.
Otherwise, the entire model will be loaded and modified, so that
external data are saved along the model graph.
:param input_shapes: Override for model's input shapes
:param inplace: If True, overwrite the original model file
:return: File path to modified ONNX model
:return: File path to modified ONNX model.
If inplace is True,
the modified model will be saved to the same path as the original
model. Else the modified model will be saved to a
temporary file.
"""

if input_shapes is None:
return onnx_filepath

model = onnx.load(onnx_filepath, load_external_data=False)
model = onnx.load(onnx_filepath, load_external_data=not inplace)
all_inputs = model.graph.input
initializer_input_names = [node.name for node in model.graph.initializer]
external_inputs = [
Expand Down Expand Up @@ -279,7 +306,7 @@ def override_onnx_input_shapes(
return onnx_filepath
else:
# Save modified model, this will be cleaned up when context is exited
return save_onnx_to_temp_files(model, with_external_data=False)
return save_onnx_to_temp_files(model, with_external_data=not inplace)


def truncate_onnx_model(
Expand Down
24 changes: 15 additions & 9 deletions src/deepsparse/yolo/utils/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
import yaml

import torch
from deepsparse.utils.onnx import save_onnx_to_temp_files
from deepsparse.yolo.schemas import YOLOOutput
from sparsezoo.utils import save_onnx

Expand Down Expand Up @@ -341,7 +342,7 @@ def get_onnx_expected_image_shape(onnx_model: onnx.ModelProto) -> Tuple[int, ...


def modify_yolo_onnx_input_shape(
model_path: str, image_shape: Tuple[int, int]
model_path: str, image_shape: Tuple[int, int], inplace: bool = True
) -> Tuple[str, Optional[NamedTemporaryFile]]:
"""
Creates a new YOLO ONNX model from the given path that accepts the given input
Expand All @@ -350,13 +351,17 @@ def modify_yolo_onnx_input_shape(

:param model_path: file path to YOLO ONNX model
:param image_shape: 2-tuple of the image shape to resize this yolo model to
:return: filepath to an onnx model reshaped to the given input shape will be the
original path if the shape is the same. Additionally returns the
NamedTemporaryFile for managing the scope of the object for file deletion
:param inplace: if True, modifies the given model_path in-place, otherwise
saves the modified model to a temporary file
:return: filepath to an onnx model reshaped to the given input shape.
If inplace is True,
the modified model will be saved to the same path as the original
model. Else the modified model will be saved to a
temporary file.
"""
has_postprocessing = yolo_onnx_has_postprocessing(model_path)

model = onnx.load(model_path)
model = onnx.load(model_path, load_external_data=not inplace)
model_input = model.graph.input[0]

initial_x, initial_y = get_onnx_expected_image_shape(model)
Expand Down Expand Up @@ -399,10 +404,11 @@ def modify_yolo_onnx_input_shape(
)
set_tensor_dim_shape(model.graph.output[0], 1, num_predictions)

tmp_file = NamedTemporaryFile() # file will be deleted after program exit
save_onnx(model, tmp_file.name)

return tmp_file.name, tmp_file
if inplace:
save_onnx(model, model_path)
return model_path
else:
return save_onnx_to_temp_files(model, with_external_data=not inplace)


def get_tensor_dim_shape(tensor: onnx.TensorProto, dim: int) -> int:
Expand Down
Loading