Skip to content

Commit

Permalink
Fix the tests
Browse files Browse the repository at this point in the history
  • Loading branch information
dbogunowicz committed Dec 29, 2023
1 parent c65ab6e commit 3bd72bb
Show file tree
Hide file tree
Showing 11 changed files with 121 additions and 56 deletions.
32 changes: 32 additions & 0 deletions .github/workflows/test-check.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ jobs:
deepsparse: ${{ steps.deepsparse-check.outputs.output }}
onnx: ${{ steps.onnx-check.outputs.output }}
pytorch: ${{ steps.pytorch-check.outputs.output }}
export: ${{ steps.export-check.outputs.output }}
steps:
- uses: actions/checkout@v2
with:
Expand Down Expand Up @@ -53,6 +54,12 @@ jobs:
((git diff --name-only origin/main HEAD | grep -E "[src|tests]/sparseml/pytorch|setup.py|.github")
|| (echo $GITHUB_REF | grep -E "refs/heads/[release/|main]"))
&& echo "::set-output name=output::1" || echo "::set-output name=output::0"
- name: "Checking if sparseml.export was changed"
  id: export-check
  # Emits output=1 when export sources/tests (or packaging/CI files) changed,
  # or when running on main / a release branch, so the export-tests job runs.
  # NOTE: `[src|tests]` is a character class in ERE, not alternation — it
  # matches any single char of s,r,c,|,t,e. Use grouping `(src|tests)` to
  # express the intended "src OR tests" (same fix for `(release/|main)`).
  # NOTE(review): `::set-output` is deprecated by GitHub Actions; sibling
  # steps still use it, so it is kept here for consistency — migrate the
  # whole workflow to $GITHUB_OUTPUT in one pass.
  run: >
    ((git diff --name-only origin/main HEAD | grep -E "(src|tests)/sparseml/export|setup.py|.github")
    || (echo $GITHUB_REF | grep -E "refs/heads/(release/|main)"))
    && echo "::set-output name=output::1" || echo "::set-output name=output::0"
base-tests:
runs-on: ubuntu-22.04
env:
Expand Down Expand Up @@ -221,3 +228,28 @@ jobs:
run: pip3 install .[dev,torch,transformers]
- name: "🔬 Running transformers tests"
run: make test TARGETS=transformers
# Runs the sparseml.export test suite; gated on the export-check output
# computed in the test-setup job.
export-tests:
runs-on: ubuntu-22.04
env:
# Sparsezoo test mode avoids hitting live model stubs during tests.
SPARSEZOO_TEST_MODE: "true"
needs: test-setup
# Only run when test-setup detected export-related changes (or main/release).
if: ${{needs.test-setup.outputs.export == 1}}
steps:
- uses: actions/setup-python@v4
with:
python-version: '3.11'
# First checkout: this repository itself.
- uses: actions/checkout@v2
# Second checkout: sparsezoo sources into a subdirectory, pinned to the
# branch resolved by test-setup, so a matching sparsezoo can be installed.
- uses: actions/checkout@v2
with:
repository: "neuralmagic/sparsezoo"
path: "sparsezoo"
ref: ${{needs.test-setup.outputs.branch}}
- name: "⚙️ Install sparsezoo dependencies"
run: pip3 install -U pip && pip3 install setuptools sparsezoo/
# Remove the checked-out sparsezoo tree after install so it cannot shadow
# the installed package during test collection.
- name: "Clean sparsezoo directory"
run: rm -r sparsezoo/
- name: "⚙️ Install dependencies"
run: pip3 install .[dev,torch,transformers,torchvision]
- name: "🔬 Running export tests"
run: make test TARGETS=export

5 changes: 4 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ MDCHECKFILES := CODE_OF_CONDUCT.md CONTRIBUTING.md DEVELOPING.md README.md
SPARSEZOO_TEST_MODE := "true"

BUILD_ARGS := # set nightly to build nightly release
TARGETS := "" # targets for running pytests: deepsparse,keras,onnx,pytorch,pytorch_models,pytorch_datasets,tensorflow_v1,tensorflow_v1_models,tensorflow_v1_datasets
TARGETS := "" # targets for running pytests: deepsparse,keras,onnx,pytorch,pytorch_models,export,pytorch_datasets,tensorflow_v1,tensorflow_v1_models,tensorflow_v1_datasets
PYTEST_ARGS ?= ""
PYTEST_INTEG_ARGS ?= ""
ifneq ($(findstring deepsparse,$(TARGETS)),deepsparse)
Expand All @@ -18,6 +18,9 @@ endif
ifneq ($(findstring transformers,$(TARGETS)),transformers)
PYTEST_ARGS := $(PYTEST_ARGS) --ignore tests/sparseml/transformers
endif
# Skip the export test suite unless "export" appears in TARGETS
# (mirrors the deepsparse/transformers/keras gates above).
ifneq ($(findstring export,$(TARGETS)),export)
PYTEST_ARGS := $(PYTEST_ARGS) --ignore tests/sparseml/export
endif
ifneq ($(findstring keras,$(TARGETS)),keras)
PYTEST_ARGS := $(PYTEST_ARGS) --ignore tests/sparseml/keras
endif
Expand Down
11 changes: 9 additions & 2 deletions src/sparseml/pytorch/image_classification/utils/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -673,11 +673,18 @@ def is_image_classification_model(source_path: Union[Path, str]) -> bool:
else:
checkpoint_path = source_path
try:
checkpoint = torch.load(checkpoint_path)
if torch.cuda.is_available():
checkpoint = torch.load(checkpoint_path)
else:
checkpoint = torch.load(checkpoint_path, map_location=torch.device("cpu"))

arch_key = checkpoint.get("arch_key")
if arch_key:
return True
except Exception:
except Exception as e:
_LOGGER.warning(
f"Model: {checkpoint_path} not an image classification model: {e}"
)
return False


Expand Down
32 changes: 13 additions & 19 deletions src/sparseml/transformers/sparsification/obcq/obcq.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,6 @@
from sparseml.core.framework import Framework
from sparseml.modifiers.obcq.utils.helpers import ppl_eval_general
from sparseml.pytorch.model_load.helpers import (
RECIPE_FILE_NAME,
apply_recipe_structure_to_model,
fallback_to_cpu,
parse_dtype,
save_model_and_recipe,
Expand All @@ -36,7 +34,7 @@
llama_forward,
opt_forward,
)
from sparseml.transformers.utils.sparse_model import SparseAutoModel
from sparseml.transformers.utils.initializers import initialize_sparse_model


__all__ = ["one_shot"]
Expand Down Expand Up @@ -90,25 +88,29 @@ def one_shot(
config = AutoConfig.from_pretrained(model_path)
model_type = config.model_type.lower()

model_loader_fn = None
forward_fn = None
if "opt" in model_type:
model_loader_fn = SparseAutoModel.text_classification_from_pretrained
forward_fn = opt_forward
elif "llama" in model_type or "mistral" in model_type:
model_loader_fn = SparseAutoModel.text_classification_from_pretrained
forward_fn = llama_forward
else:
_LOGGER.warning(
f"A supported model type({SUPPORTED_MODELS}) could not be "
f"parsed from model_path={model_path}. Defaulting to "
"SparseAutoModel loading. "
)
model_loader_fn = SparseAutoModel.text_classification_from_pretrained
forward_fn = llama_forward

torch_dtype = parse_dtype(precision)
model = model_loader_fn(
model_path, sequence_length=sequence_length, torch_dtype=torch_dtype
# create session and initialize a sparse model
session_manager.create_session()
model = initialize_sparse_model(
model_path=model_path,
task="text-generation",
sequence_length=sequence_length,
torch_dtype=torch_dtype,
config=config,
recipe=recipe_file,
device=device,
)

if dataset_name not in SUPPORTED_DATASETS:
Expand All @@ -126,16 +128,8 @@ def one_shot(
calibration_data = dataset.loader
tokenizer = dataset.tokenizer

# create session and initialize any structure from input model recipe
session_manager.create_session()
session = session_manager.active_session()
input_recipe_path = os.path.join(model_path, RECIPE_FILE_NAME)
if os.path.exists(input_recipe_path):
apply_recipe_structure_to_model(
model=model, recipe_path=input_recipe_path, model_path=model_path
)

# launch one shot
session = session_manager.active_session()
session.apply(
framework=Framework.pytorch,
recipe=recipe_file,
Expand Down
8 changes: 3 additions & 5 deletions src/sparseml/transformers/utils/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@
from transformers import AutoConfig, AutoModel
from transformers.trainer_utils import get_last_checkpoint

import sparseml.core.session as session_manager
from sparseml.export.helpers import ONNX_MODEL_NAME
from sparseml.pytorch.model_load.helpers import apply_recipe_structure_to_model
from sparsezoo import setup_model
Expand Down Expand Up @@ -70,18 +69,17 @@ class TaskNames(Enum):


def apply_structure_to_transformers(
model: AutoModel, model_directory: Union[str, Path], recipe_path: Union[Path, str]
model: AutoModel, model_directory: Union[str, Path], recipe: Union[Path, str]
) -> None:
"""
Apply the structure (dictated by the recipe) to the model.
If no recipe is found, the model is returned as is (a warning is logged).
:param model: the model to apply the structure to
:param model_directory: the directory where the model is stored
:param recipe_path: a valid path to the recipe to apply
:param recipe: a valid path to the recipe to apply or a recipe string
"""
session_manager.create_session()
apply_recipe_structure_to_model(
model=model, recipe_path=recipe_path, model_path=model_directory
model=model, recipe_path=recipe, model_path=model_directory
)


Expand Down
53 changes: 39 additions & 14 deletions src/sparseml/transformers/utils/initializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -206,20 +206,51 @@ def resolve_recipe_application(
) -> Union[str, Path, None]:
"""
Resolve the recipe to apply to the model.
If the recipe is None, will look for a recipe in the model_path
:param recipe: the recipe to apply to the model.
If None, will look for a recipe in the model_path
It can be one of the following:
- None (no recipe will be applied or the
default recipe will be applied if exists. Default recipe
is assumed to be stored in the model_path and named RECIPE_NAME)
- a path to the recipe file
- name of the recipe file (e.g. "recipe.yaml")
(assumed to be stored in the model_path instead
of RECIPE_NAME)
- a string containing the recipe
:param model_path: the path to the model to load
:return: the resolved recipe
"""
default_recipe = os.path.join(model_path, RECIPE_NAME)
requested_recipe = None
recipe_is_file = True
if recipe:
requested_recipe = (
recipe if os.path.isfile(recipe) else os.path.join(model_path, recipe)
if os.path.isfile(recipe):
# recipe is a path to a recipe file
pass
elif os.path.isfile(os.path.join(model_path, recipe)):
# recipe is a name of a recipe file
recipe = os.path.join(model_path, recipe)
else:
# recipe is a string containing the recipe
recipe_is_file = False
_LOGGER.debug(
"Applying the recipe string directly to the model, without "
"checking for a potential existing recipe in the model_path."
)
else:
_LOGGER.info(
"No recipe requested and no default recipe "
f"found in {model_path}. Skipping recipe application."
)
return None

if recipe_is_file:
# if recipe is a file, resolve it to a path
return _resolve_recipe_file(recipe, model_path)
return recipe


def _resolve_recipe_file(
requested_recipe: Union[str, Path], model_path: Union[str, Path]
) -> Union[str, Path, None]:
default_recipe = os.path.join(model_path, RECIPE_NAME)
default_recipe_exists = os.path.isfile(default_recipe)
default_and_request_recipes_identical = default_recipe == requested_recipe

Expand Down Expand Up @@ -253,15 +284,9 @@ def resolve_recipe_application(
return requested_recipe

elif default_recipe_exists:
_LOGGER.info(f"Applying the default recip: {default_recipe}")
_LOGGER.info(f"Applying the default recipe: {default_recipe}")
return default_recipe

_LOGGER.info(
"No recipe requested and no default recipe "
f"found in {model_path}. Skipping recipe application."
)
return None


def _parse_data_args(data_args):
try:
Expand Down
3 changes: 3 additions & 0 deletions src/sparseml/transformers/utils/load_task_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,16 +65,19 @@ def load_task_model(
)

if task in TaskNames.text_generation.value:
torch_dtype = kwargs.get("torch_dtype")
sequence_length = kwargs.get("sequence_length")
if sequence_length is None:
sequence_length = resolve_sequence_length(config)

return SparseAutoModel.text_generation_from_pretrained(
model_name_or_path=model_path,
sequence_length=sequence_length,
config=config,
model_type="model",
recipe=recipe,
trust_remote_code=trust_remote_code,
torch_dtype=torch_dtype,
)

raise ValueError(f"unrecognized task given of {task}")
4 changes: 2 additions & 2 deletions src/sparseml/transformers/utils/sparse_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,7 +181,7 @@ def question_answering_from_pretrained_distil(
@staticmethod
def text_classification_from_pretrained(
model_name_or_path: str,
model_type: str,
model_type: str = "model",
**kwargs,
) -> Module:
"""
Expand Down Expand Up @@ -243,9 +243,9 @@ def text_classification_from_pretrained_distil(
@staticmethod
def text_generation_from_pretrained(
model_name_or_path: str,
model_type: str,
sequence_length: int,
config: AutoConfig,
model_type: str = "model",
recipe: Optional[Union[str, Path]] = None,
trust_remote_code: bool = False,
torch_dtype: Union[str, torch.dtype] = "auto",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,19 +11,15 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from src.sparseml.integration_helper_functions import (
IntegrationHelperFunctions,
Integrations,
)


def test_integration_helper_functions():
# import needed to register the object on the fly
import sparseml.pytorch.image_classification.integration_helper_functions # noqa F401

image_classification = IntegrationHelperFunctions.load_from_registry(
Integrations.image_classification.value
yield
from sparseml.pytorch.image_classification.integration_helper_functions import (
ImageClassification,
)

image_classification = ImageClassification()
assert image_classification.create_model
assert image_classification.create_dummy_input
assert image_classification.export
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,8 @@ def test_save_zoo_directory(stub, tmp_path_factory):
"stub, is_image_classification",
[("zoo:efficientnet_v2-s-imagenet-base_quantized", True)],
)
def test_is_image_classification_model(stub, is_image_classification):
path_to_model = Model(stub).training.path
def test_is_image_classification_model(tmp_path, stub, is_image_classification):
path_to_model = Model(stub, tmp_path).training.path
assert is_image_classification_model(path_to_model)
assert is_image_classification_model(Path(path_to_model))
shutil.rmtree(tmp_path)
10 changes: 8 additions & 2 deletions tests/sparseml/transformers/obcq/test_obcq.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,10 @@
from sparseml.transformers.data import TransformersDataset
from sparseml.transformers.sparsification.obcq.obcq import one_shot
from sparseml.transformers.sparsification.obcq.utils.helpers import llama_forward
from sparseml.transformers.utils.sparse_model import SparseAutoModel
from sparseml.transformers.utils.initializers import (
initialize_config,
initialize_sparse_model,
)


@pytest.mark.parametrize(
Expand Down Expand Up @@ -75,7 +78,10 @@ def test_lm_head_target():
if not torch.cuda.is_available():
device = "cpu"

model = SparseAutoModel.text_classification_from_pretrained(tiny_model_path)
config = initialize_config(model_path=tiny_model_path)
model = initialize_sparse_model(
model_path=tiny_model_path, device=device, task="text-classification", config=config
)

kwargs = {
"sparsity": 0.5,
Expand Down

0 comments on commit 3bd72bb

Please sign in to comment.