fix accelerate tests for roberta xl (#31288)

* fix accelerate tests for roberta xl

* style
This commit is contained in:
Marc Sun 2024-06-06 15:44:35 +02:00 committed by GitHub
parent 5ba8ac54f5
commit 99895ae5e2
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 3 additions and 1 deletion

View File

@@ -572,7 +572,7 @@ class XLMRobertaXLPreTrainedModel(PreTrainedModel):
config_class = XLMRobertaXLConfig
base_model_prefix = "roberta"
_no_split_modules = ["XLMRobertaXLEmbeddings", "XLMRobertaXLSelfAttention"]
_no_split_modules = ["XLMRobertaXLEmbeddings", "XLMRobertaXLLayer"]
# Copied from transformers.models.bert.modeling_bert.BertPreTrainedModel._init_weights
def _init_weights(self, module):

View File

@@ -387,6 +387,8 @@ class XLMRobertaXLModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTes
else {}
)
model_split_percents = [0.5, 0.85, 0.95]
# TODO: Fix the failed tests
def is_pipeline_test_to_skip(
self, pipeline_test_casse_name, config_class, model_architecture, tokenizer_name, processor_name