diff --git a/tests/models/big_bird/test_modeling_big_bird.py b/tests/models/big_bird/test_modeling_big_bird.py
index 7a7ad5071df26b..bda5cb62186af2 100644
--- a/tests/models/big_bird/test_modeling_big_bird.py
+++ b/tests/models/big_bird/test_modeling_big_bird.py
@@ -716,7 +716,7 @@ def test_block_sparse_attention_probs(self):
         """
 
         if not self.test_attention_probs:
-            self.skip("test_attention_probs is set to False")
+            self.skipTest("test_attention_probs is set to False")
 
         model = BigBirdModel.from_pretrained(
             "google/bigbird-roberta-base", attention_type="block_sparse", num_random_blocks=3, block_size=16
diff --git a/tests/models/whisper/test_modeling_whisper.py b/tests/models/whisper/test_modeling_whisper.py
index 5fc66f9a20551d..0232543d121f2f 100644
--- a/tests/models/whisper/test_modeling_whisper.py
+++ b/tests/models/whisper/test_modeling_whisper.py
@@ -3335,7 +3335,7 @@ def test_equivalence_flax_to_pt(self):
             fx_model_class_name = "Flax" + model_class.__name__
 
             if not hasattr(transformers, fx_model_class_name):
-                self.skip("Flax model does not exist")
+                self.skipTest("Flax model does not exist")
 
             # Output all for aggressive testing
             config.output_hidden_states = True
diff --git a/tests/test_tokenization_common.py b/tests/test_tokenization_common.py
index 60ed58c4005364..867ca859ebc109 100644
--- a/tests/test_tokenization_common.py
+++ b/tests/test_tokenization_common.py
@@ -2678,7 +2678,7 @@ def test_np_encode_plus_sent_to_model(self):
                 config = config_class()
 
                 if config.is_encoder_decoder or config.pad_token_id is None:
-                    self.skip("Model is not an encoder-decoder model or has no set pad token id")
+                    self.skipTest("Model is not an encoder-decoder model or has no set pad token id")
 
                 # Build sequence
                 first_ten_tokens = list(tokenizer.get_vocab().keys())[:10]
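
A note on the fix (not part of the diff): unittest.TestCase has no instance method named skip; unittest.skip is a module-level decorator, so calling self.skip(...) raises AttributeError and the test errors out rather than being skipped. The supported in-test API is self.skipTest(reason), which raises unittest.SkipTest. Below is a minimal sketch of the corrected pattern, using a hypothetical feature_enabled flag standing in for the guards in the diff above:

import unittest

class ExampleTest(unittest.TestCase):
    def test_conditional_skip(self):
        feature_enabled = False  # hypothetical flag, mirrors e.g. self.test_attention_probs
        if not feature_enabled:
            # skipTest raises unittest.SkipTest, so the runner reports a skip, not an error
            self.skipTest("feature is disabled")
        self.assertTrue(feature_enabled)

if __name__ == "__main__":
    unittest.main()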