mark test_initialization as flaky in 2 model tests (#27906)

fix

Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
Yih-Dar 2023-12-08 14:54:32 +01:00 committed by GitHub
parent 7f07c356a4
commit 3b720ad9a5
2 changed files with 10 additions and 1 deletion
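
Note: `is_flaky` (from `transformers.testing_utils`) reruns a failing test a bounded number of times and only reports a failure if every attempt fails. A minimal sketch of that retry pattern, assuming nothing about the actual transformers implementation (`retry_flaky` is a made-up name):

import functools


def retry_flaky(max_attempts=3, description=None):
    """Rerun the wrapped test up to `max_attempts` times before failing.

    `description` mirrors the argument used in the diff below and is
    informational only in this sketch.
    """

    def decorator(test_func):
        @functools.wraps(test_func)
        def wrapper(*args, **kwargs):
            last_error = None
            for _ in range(max_attempts):
                try:
                    return test_func(*args, **kwargs)
                except AssertionError as err:
                    # Keep the latest failure; re-raise it only if no attempt passed.
                    last_error = err
            raise last_error

        return wrapper

    return decorator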

tests/models/dinov2/test_modeling_dinov2.py

@@ -19,6 +19,7 @@ import unittest
 from transformers import Dinov2Config
 from transformers.testing_utils import (
+    is_flaky,
     require_torch,
     require_vision,
     slow,
@@ -230,6 +231,10 @@ class Dinov2ModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
         self.model_tester = Dinov2ModelTester(self)
         self.config_tester = ConfigTester(self, config_class=Dinov2Config, has_text_modality=False, hidden_size=37)
 
+    @is_flaky(max_attempts=3, description="`torch.nn.init.trunc_normal_` is flaky.")
+    def test_initialization(self):
+        super().test_initialization()
+
     def test_config(self):
         self.config_tester.run_common_tests()

tests/models/vitdet/test_modeling_vitdet.py

@@ -18,7 +18,7 @@
 import unittest
 
 from transformers import VitDetConfig
-from transformers.testing_utils import require_torch, torch_device
+from transformers.testing_utils import is_flaky, require_torch, torch_device
 from transformers.utils import is_torch_available
 
 from ...test_backbone_common import BackboneTesterMixin
@@ -175,6 +175,10 @@ class VitDetModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
         self.model_tester = VitDetModelTester(self)
         self.config_tester = ConfigTester(self, config_class=VitDetConfig, has_text_modality=False, hidden_size=37)
 
+    @is_flaky(max_attempts=3, description="`torch.nn.init.trunc_normal_` is flaky.")
+    def test_initialization(self):
+        super().test_initialization()
+
     # TODO: Fix me (once this model gets more usage)
     @unittest.skip("Does not work on the tiny model as we keep hitting edge cases.")
     def test_cpu_offload(self):
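
For reference, a hypothetical usage sketch (not part of this commit): the same decorator guarding a standalone statistical check on `torch.nn.init.trunc_normal_`, the kind of borderline assertion that motivates retrying `test_initialization`. The class and test names below are made up for illustration.

import unittest

import torch

from transformers.testing_utils import is_flaky, require_torch


@require_torch
class TruncNormalInitSketchTest(unittest.TestCase):
    @is_flaky(max_attempts=3, description="`torch.nn.init.trunc_normal_` is flaky.")
    def test_mean_close_to_zero(self):
        # Statistical property of freshly initialized weights: a rare unlucky
        # draw can land just outside the tolerance, so the check is retried
        # rather than treated as a hard failure.
        weight = torch.empty(256, 256)
        torch.nn.init.trunc_normal_(weight, mean=0.0, std=0.02, a=-2.0, b=2.0)
        self.assertLess(weight.mean().abs().item(), 1e-3)


if __name__ == "__main__":
    unittest.main()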