[ci] fix 3 remaining slow GPU failures (#4584)
commit b86e42e0ac (parent 365d452d4d)
@@ -73,10 +73,10 @@ class DistilBertConfig(PretrainedConfig):
             The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
         qa_dropout (:obj:`float`, optional, defaults to 0.1):
             The dropout probabilities used in the question answering model
-            :class:`~tranformers.DistilBertForQuestionAnswering`.
+            :class:`~transformers.DistilBertForQuestionAnswering`.
         seq_classif_dropout (:obj:`float`, optional, defaults to 0.2):
             The dropout probabilities used in the sequence classification model
-            :class:`~tranformers.DistilBertForSequenceClassification`.
+            :class:`~transformers.DistilBertForSequenceClassification`.

     Example::

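The two dropout values documented in this hunk are ordinary constructor arguments on DistilBertConfig. A minimal sketch (not part of the diff) of where they are set, with the defaults taken from the docstring above:

from transformers import DistilBertConfig

# defaults follow the docstring above; pass different values to override them
config = DistilBertConfig(
    qa_dropout=0.1,           # used by DistilBertForQuestionAnswering
    seq_classif_dropout=0.2,  # used by DistilBertForSequenceClassification
)
print(config.qa_dropout, config.seq_classif_dropout)
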
@@ -125,7 +125,7 @@ class EncoderDecoderModel(PreTrainedModel):

         Examples::

-            from tranformers import EncoderDecoder
+            from transformers import EncoderDecoder

             model = EncoderDecoder.from_encoder_decoder_pretrained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert
     """
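The class defined in this file is EncoderDecoderModel, so a runnable version of the docstring example would presumably import that name rather than EncoderDecoder. A minimal sketch under that assumption:

from transformers import EncoderDecoderModel

# initialize a Bert2Bert model from two pretrained BERT checkpoints,
# as in the docstring example above
model = EncoderDecoderModel.from_encoder_decoder_pretrained(
    "bert-base-uncased", "bert-base-uncased"
)
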
@@ -240,7 +240,7 @@ class BartTranslationTests(unittest.TestCase):
         with torch.no_grad():
             logits, *other_stuff = model(**self.net_input)

-        expected_slice = torch.tensor([9.0078, 10.1113, 14.4787])
+        expected_slice = torch.tensor([9.0078, 10.1113, 14.4787], device=torch_device)
         result_slice = logits[0][0][:3]
         self.assertTrue(torch.allclose(expected_slice, result_slice, atol=TOLERANCE))

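The failure this hunk addresses comes from device placement: on a GPU runner the model's logits live on CUDA, while a bare torch.tensor(...) is created on the CPU, and torch.allclose refuses to compare tensors on different devices. A minimal sketch of the pattern, with torch_device and TOLERANCE redefined here only for illustration (in the test suite they come from the shared test utilities):

import torch

torch_device = "cuda" if torch.cuda.is_available() else "cpu"  # assumption: mirrors the test utility
TOLERANCE = 1e-4                                               # assumption: placeholder tolerance

result_slice = torch.tensor([9.0078, 10.1113, 14.4787], device=torch_device)    # stands in for logits[0][0][:3]
expected_slice = torch.tensor([9.0078, 10.1113, 14.4787], device=torch_device)  # now on the same device

assert torch.allclose(expected_slice, result_slice, atol=TOLERANCE)
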
@@ -222,6 +222,6 @@ class TFElectraModelTest(TFModelTesterMixin, unittest.TestCase):
     @slow
     def test_model_from_pretrained(self):
         # for model_name in list(TF_ELECTRA_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
-        for model_name in ["electra-small-discriminator"]:
+        for model_name in ["google/electra-small-discriminator"]:
             model = TFElectraModel.from_pretrained(model_name)
             self.assertIsNotNone(model)
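The remaining change swaps the bare checkpoint name for its namespaced model-hub identifier, presumably because the checkpoint is hosted under the google organization. A minimal sketch of the corrected lookup:

from transformers import TFElectraModel

# "google/electra-small-discriminator" is the namespaced identifier used by the updated test
model = TFElectraModel.from_pretrained("google/electra-small-discriminator")
assert model is not None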