Remove ninja from docker image build (#31080)

fix

Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
Yih-Dar 2024-05-28 11:36:26 +02:00 committed by GitHub
parent 8f0f7271d0
commit 8e3b1fef97
3 changed files with 4 additions and 12 deletions


@@ -51,6 +51,10 @@ RUN python3 -m pip install --no-cache-dir bitsandbytes
 # Some tests require quanto
 RUN python3 -m pip install --no-cache-dir quanto
+# `quanto` will install `ninja` which leads to many `CUDA error: an illegal memory access ...` in some model tests
+# (`deformable_detr`, `rwkv`, `mra`)
+RUN python3 -m pip uninstall -y ninja
+
 # For `dinat` model
 # The `XXX` part in `torchXXX` needs to match `PYTORCH` (to some extent)
 RUN python3 -m pip install --no-cache-dir natten==0.15.1+torch220$CUDA -f https://shi-labs.com/natten/wheels
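As a quick sanity check on the rebuilt image, one could confirm that `ninja` is really gone before re-running the affected model tests. This is only a sketch: `huggingface/transformers-all-latest-gpu` is an assumed image tag, not something stated in this commit.

    # exits non-zero and warns "Package(s) not found: ninja" if the uninstall worked
    docker run --rm huggingface/transformers-all-latest-gpu python3 -m pip show ninja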


@@ -232,12 +232,6 @@ class DeformableDetrModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineT
         self.model_tester = DeformableDetrModelTester(self)
         self.config_tester = ConfigTester(self, config_class=DeformableDetrConfig, has_text_modality=False)
 
-    @unittest.skip(
-        "This starts to fail since 2024/05/24, but earlier commits also fail now and affect many other tests. The error is `an illegal memory access was encountered`."
-    )
-    def test_model_parallelism(self):
-        super().test_model_parallelism()
-
     def test_config(self):
         # we don't test common_properties and arguments_init as these don't apply for Deformable DETR
         self.config_tester.create_and_test_config_to_json_string()
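With the skip removed, `test_model_parallelism` for Deformable DETR is collected again. A hedged way to exercise just this test on a multi-GPU machine (the test file path is assumed from the usual transformers layout, not taken from this diff):

    python3 -m pytest -v tests/models/deformable_detr/test_modeling_deformable_detr.py -k test_model_parallelism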


@@ -304,12 +304,6 @@ class RwkvModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin
             standardMsg = "%s not found in %s" % (safe_repr(member), safe_repr(container))
             self.fail(self._formatMessage(msg, standardMsg))
 
-    @unittest.skip(
-        "This starts to fail since 2024/05/24, but earlier commits also fail now and affect many other tests. The error is `an illegal memory access was encountered`."
-    )
-    def test_model_parallelism(self):
-        super().test_model_parallelism()
-
     def test_config(self):
         self.config_tester.run_common_tests()
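Likewise for RWKV, the previously skipped test should now run as part of the normal suite; the path below is again an assumption rather than something stated in this commit:

    python3 -m pytest -v tests/models/rwkv/test_modeling_rwkv.py -k test_model_parallelism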