Removing _use_sdpa attributes and their fix-copies
hackyon committed Feb 14, 2024
Commit fe6db3c · 1 parent b68240d
Showing 10 changed files with 0 additions and 40 deletions.
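Each of the ten diffs below removes the same four lines from a copied model's __init__: the fix-copies marker (# Copied from transformers.models.bert.modeling_bert.BertModel.__init__ ...), the _use_sdpa and position_embedding_type assignments, and an adjacent blank line. For reference, here is a minimal self-contained Python sketch of the removed pattern; ExampleConfig and ExampleModel are illustrative stand-ins rather than transformers classes, and only the two assignments inside __init__ are taken verbatim from the diffs.

# Illustrative stand-ins only; not classes from the transformers library.
class ExampleConfig:
    _attn_implementation = "eager"        # configured attention backend, e.g. "eager" or "sdpa"
    position_embedding_type = "absolute"


class ExampleModel:
    def __init__(self, config, add_pooling_layer=True):
        self.config = config
        # The two fix-copied assignments that this commit deletes from each model below:
        self._use_sdpa = config._attn_implementation == "sdpa"
        self.position_embedding_type = config.position_embedding_type


model = ExampleModel(ExampleConfig())
print(model._use_sdpa)  # False with the "eager" setting above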
4 changes: 0 additions & 4 deletions src/transformers/models/altclip/modeling_altclip.py
@@ -1214,7 +1214,6 @@ class AltRobertaModel(AltCLIPPreTrainedModel):

config_class = AltCLIPTextConfig

# Copied from transformers.models.bert.modeling_bert.BertModel.__init__ with Bert->AltRoberta
def __init__(self, config, add_pooling_layer=True):
super().__init__(config)
self.config = config
@@ -1224,9 +1223,6 @@ def __init__(self, config, add_pooling_layer=True):

self.pooler = AltRobertaPooler(config) if add_pooling_layer else None

self._use_sdpa = config._attn_implementation == "sdpa"
self.position_embedding_type = config.position_embedding_type

# Initialize weights and apply final processing
self.post_init()

4 changes: 0 additions & 4 deletions src/transformers/models/camembert/modeling_camembert.py
@@ -755,7 +755,6 @@ class CamembertModel(CamembertPreTrainedModel):

_no_split_modules = []

# Copied from transformers.models.bert.modeling_bert.BertModel.__init__ with Bert->Camembert
def __init__(self, config, add_pooling_layer=True):
super().__init__(config)
self.config = config
@@ -765,9 +764,6 @@ def __init__(self, config, add_pooling_layer=True):

self.pooler = CamembertPooler(config) if add_pooling_layer else None

self._use_sdpa = config._attn_implementation == "sdpa"
self.position_embedding_type = config.position_embedding_type

# Initialize weights and apply final processing
self.post_init()

4 changes: 0 additions & 4 deletions src/transformers/models/clap/modeling_clap.py
@@ -1773,7 +1773,6 @@ class ClapTextModel(ClapPreTrainedModel):

config_class = ClapTextConfig

# Copied from transformers.models.bert.modeling_bert.BertModel.__init__ with Bert->ClapText
def __init__(self, config, add_pooling_layer=True):
super().__init__(config)
self.config = config
@@ -1783,9 +1782,6 @@ def __init__(self, config, add_pooling_layer=True):

self.pooler = ClapTextPooler(config) if add_pooling_layer else None

self._use_sdpa = config._attn_implementation == "sdpa"
self.position_embedding_type = config.position_embedding_type

# Initialize weights and apply final processing
self.post_init()

4 changes: 0 additions & 4 deletions src/transformers/models/ernie/modeling_ernie.py
@@ -810,7 +810,6 @@ class ErnieModel(ErniePreTrainedModel):
`add_cross_attention` set to `True`; an `encoder_hidden_states` is then expected as an input to the forward pass.
"""

# Copied from transformers.models.bert.modeling_bert.BertModel.__init__ with Bert->Ernie
def __init__(self, config, add_pooling_layer=True):
super().__init__(config)
self.config = config
@@ -820,9 +819,6 @@ def __init__(self, config, add_pooling_layer=True):

self.pooler = ErniePooler(config) if add_pooling_layer else None

self._use_sdpa = config._attn_implementation == "sdpa"
self.position_embedding_type = config.position_embedding_type

# Initialize weights and apply final processing
self.post_init()

4 changes: 0 additions & 4 deletions src/transformers/models/markuplm/modeling_markuplm.py
@@ -811,7 +811,6 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P
MARKUPLM_START_DOCSTRING,
)
class MarkupLMModel(MarkupLMPreTrainedModel):
# Copied from transformers.models.bert.modeling_bert.BertModel.__init__ with Bert->MarkupLM
def __init__(self, config, add_pooling_layer=True):
super().__init__(config)
self.config = config
@@ -821,9 +820,6 @@ def __init__(self, config, add_pooling_layer=True):

self.pooler = MarkupLMPooler(config) if add_pooling_layer else None

self._use_sdpa = config._attn_implementation == "sdpa"
self.position_embedding_type = config.position_embedding_type

# Initialize weights and apply final processing
self.post_init()

4 changes: 0 additions & 4 deletions src/transformers/models/roberta/modeling_roberta.py
@@ -702,7 +702,6 @@ class RobertaModel(RobertaPreTrainedModel):
"""

# Copied from transformers.models.bert.modeling_bert.BertModel.__init__ with Bert->Roberta
def __init__(self, config, add_pooling_layer=True):
super().__init__(config)
self.config = config
@@ -712,9 +711,6 @@ def __init__(self, config, add_pooling_layer=True):

self.pooler = RobertaPooler(config) if add_pooling_layer else None

self._use_sdpa = config._attn_implementation == "sdpa"
self.position_embedding_type = config.position_embedding_type

# Initialize weights and apply final processing
self.post_init()

4 changes: 0 additions & 4 deletions src/transformers/models/roc_bert/modeling_roc_bert.py
@@ -892,7 +892,6 @@ class RoCBertModel(RoCBertPreTrainedModel):
`add_cross_attention` set to `True`; an `encoder_hidden_states` is then expected as an input to the forward pass.
"""

# Copied from transformers.models.bert.modeling_bert.BertModel.__init__ with Bert->RoCBert
def __init__(self, config, add_pooling_layer=True):
super().__init__(config)
self.config = config
@@ -902,9 +901,6 @@ def __init__(self, config, add_pooling_layer=True):

self.pooler = RoCBertPooler(config) if add_pooling_layer else None

self._use_sdpa = config._attn_implementation == "sdpa"
self.position_embedding_type = config.position_embedding_type

# Initialize weights and apply final processing
self.post_init()

4 changes: 0 additions & 4 deletions src/transformers/models/xlm_roberta/modeling_xlm_roberta.py
@@ -704,7 +704,6 @@ class XLMRobertaModel(XLMRobertaPreTrainedModel):
"""

# Copied from transformers.models.bert.modeling_bert.BertModel.__init__ with Bert->XLMRoberta
def __init__(self, config, add_pooling_layer=True):
super().__init__(config)
self.config = config
@@ -714,9 +713,6 @@ def __init__(self, config, add_pooling_layer=True):

self.pooler = XLMRobertaPooler(config) if add_pooling_layer else None

self._use_sdpa = config._attn_implementation == "sdpa"
self.position_embedding_type = config.position_embedding_type

# Initialize weights and apply final processing
self.post_init()

4 changes: 0 additions & 4 deletions src/transformers/models/xlm_roberta_xl/modeling_xlm_roberta_xl.py
@@ -667,7 +667,6 @@ class XLMRobertaXLModel(XLMRobertaXLPreTrainedModel):
an input to the forward pass. .. _*Attention is all you need*: https://arxiv.org/abs/1706.03762
"""

# Copied from transformers.models.bert.modeling_bert.BertModel.__init__ with Bert->XLMRobertaXL
def __init__(self, config, add_pooling_layer=True):
super().__init__(config)
self.config = config
@@ -677,9 +676,6 @@ def __init__(self, config, add_pooling_layer=True):

self.pooler = XLMRobertaXLPooler(config) if add_pooling_layer else None

self._use_sdpa = config._attn_implementation == "sdpa"
self.position_embedding_type = config.position_embedding_type

# Initialize weights and apply final processing
self.post_init()

4 changes: 0 additions & 4 deletions src/transformers/models/xmod/modeling_xmod.py
@@ -793,7 +793,6 @@ class XmodModel(XmodPreTrainedModel):
"""

# Copied from transformers.models.bert.modeling_bert.BertModel.__init__ with Bert->Xmod
def __init__(self, config, add_pooling_layer=True):
super().__init__(config)
self.config = config
@@ -803,9 +802,6 @@ def __init__(self, config, add_pooling_layer=True):

self.pooler = XmodPooler(config) if add_pooling_layer else None

self._use_sdpa = config._attn_implementation == "sdpa"
self.position_embedding_type = config.position_embedding_type

# Initialize weights and apply final processing
self.post_init()

