Ci-jukebox (#20613)

* Fix CUDA OOM by using a single Prior

* Only send the prior to the device when it is used (see the sketch after the commit message)

* Use a custom model

* Skip the big slow test

* Update tests/models/jukebox/test_modeling_jukebox.py

Co-authored-by: Yih-Dar <2521628+ydshieh@users.noreply.github.com>

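For context, the OOM fix comes down to never holding more than one prior on the GPU at a time. A minimal sketch of that pattern (hypothetical helper, not the actual Jukebox sampling code, assuming each prior is a torch.nn.Module):

    import torch

    def run_prior(priors, level, tokens, device="cuda"):
        # Hypothetical sketch: move only the prior currently in use to the GPU,
        # so the individual priors never occupy device memory at the same time.
        prior = priors[level].to(device)
        with torch.no_grad():
            out = prior(tokens.to(device))
        priors[level] = prior.to("cpu")  # free GPU memory before the next level
        torch.cuda.empty_cache()
        return out
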
Arthur 2022-12-06 16:14:03 +01:00 committed by GitHub
parent 9b14c1b6bf
commit acc439ba17

@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import unittest
+from unittest import skip
 
 from transformers import is_torch_available
 from transformers.testing_utils import require_torch, slow
@@ -311,6 +312,7 @@ class Jukebox5bModelTester(unittest.TestCase):
         torch.testing.assert_allclose(zs[2][0], torch.tensor(self.EXPECTED_OUTPUT_0))
 
     @slow
+    @skip("Not enough GPU memory on CI runners")
     def test_slow_sampling(self):
         model = JukeboxModel.from_pretrained(self.model_id, min_duration=0).eval()
         labels = [i.cuda() for i in self.prepare_inputs(self.model_id)]
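
For reference, the decorator added above is the standard unittest skip mechanism: it marks the test as skipped at collection time and records the reason in the report. A minimal standalone example (illustrative test names only, not part of the patched file):

    import unittest
    from unittest import skip

    class ExampleTests(unittest.TestCase):
        @skip("Not enough GPU memory on CI runners")
        def test_heavy_sampling(self):
            self.fail("never executed; unittest reports the test as skipped")

    if __name__ == "__main__":
        unittest.main()  # the decorated test shows up as "s" / "skipped" in the output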