Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-31 02:02:21 +06:00)
fix accelerate tests for roberta xl (#31288)
* fix accelerate tests for roberta xl
* style
commit 99895ae5e2
parent 5ba8ac54f5
@@ -572,7 +572,7 @@ class XLMRobertaXLPreTrainedModel(PreTrainedModel):
     config_class = XLMRobertaXLConfig
     base_model_prefix = "roberta"
-    _no_split_modules = ["XLMRobertaXLEmbeddings", "XLMRobertaXLSelfAttention"]
+    _no_split_modules = ["XLMRobertaXLEmbeddings", "XLMRobertaXLLayer"]

     # Copied from transformers.models.bert.modeling_bert.BertPreTrainedModel._init_weights
     def _init_weights(self, module):
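Context for the hunk above: `_no_split_modules` tells accelerate which module classes must never be sharded across devices when it builds an automatic device map. Listing the full `XLMRobertaXLLayer` instead of just `XLMRobertaXLSelfAttention` keeps each transformer block on a single device, which (presumably, as is the usual reason for such a change) avoids cross-device tensors inside a layer's attention and residual path. A minimal sketch of the code path that consumes this attribute, assuming a multi-GPU or CPU-offload setup ("facebook/xlm-roberta-xl" is the public checkpoint for this architecture):

    # Sketch: accelerate honors _no_split_modules when device_map="auto".
    from transformers import AutoModel

    model = AutoModel.from_pretrained(
        "facebook/xlm-roberta-xl",
        device_map="auto",  # accelerate shards the model across devices,
                            # but never splits inside XLMRobertaXLEmbeddings
                            # or XLMRobertaXLLayer
    )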
@@ -387,6 +387,8 @@ class XLMRobertaXLModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTes
         else {}
     )

+    model_split_percents = [0.5, 0.85, 0.95]
+
     # TODO: Fix the failed tests
     def is_pipeline_test_to_skip(
         self, pipeline_test_casse_name, config_class, model_architecture, tokenizer_name, processor_name
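Context for the hunk above: `model_split_percents` is read by the common model tests (model parallelism and CPU/disk offload) to size per-device memory budgets as fractions of the model's footprint, so accelerate is forced to actually split the model. A hedged sketch of the idea; the helper name `max_memory_for_split` is hypothetical and not from the test suite:

    # Sketch, under the assumption above: turn a split percent into a
    # max_memory budget for accelerate's device-map inference.
    from accelerate.utils import compute_module_sizes

    def max_memory_for_split(model, split_percent, num_devices=2):
        # Total parameter/buffer footprint in bytes; "" keys the root module.
        total = compute_module_sizes(model)[""]
        # Cap each device at a fraction of that footprint so the device map
        # must shard the model, exercising _no_split_modules.
        return {i: int(total * split_percent) for i in range(num_devices)}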