Mirror of https://github.com/huggingface/transformers.git
Revert "Fix Whisper CI" (#34605)
Revert "Fix Whisper CI (#34541)"
This reverts commit eb811449a2.
parent: 45b0c7680c
commit: 74d3824cc0
@@ -1452,11 +1452,10 @@ class GenerationMixin:
         ):
             generation_config.max_length -= inputs_tensor.shape[1]
         elif has_default_max_length:  # by default let's always generate 20 new tokens
-            if generation_config.max_length == GenerationConfig().max_length:
-                generation_config.max_length = generation_config.max_length + input_ids_length
-                max_position_embeddings = getattr(self.config, "max_position_embeddings", None)
-                if max_position_embeddings is not None:
-                    generation_config.max_length = min(generation_config.max_length, max_position_embeddings)
+            generation_config.max_length = generation_config.max_length + input_ids_length
+            max_position_embeddings = getattr(self.config, "max_position_embeddings", None)
+            if max_position_embeddings is not None:
+                generation_config.max_length = min(generation_config.max_length, max_position_embeddings)
 
         # same for min length
         if generation_config.min_new_tokens is not None:
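In effect, the revert restores the pre-#34541 behavior: whenever the default `max_length` is in use, it is unconditionally extended by the prompt length and then clamped to the model's maximum context. A minimal sketch of that arithmetic follows; `resolve_max_length` is a hypothetical standalone helper for illustration, not a transformers API, and the 448-position limit in the example is assumed to match Whisper's decoder context.

# Hypothetical sketch of the restored behavior (not a transformers API):
# extend the default max_length by the prompt length, then clamp it to the
# model's maximum context, mirroring the `+` lines in the hunk above.
def resolve_max_length(default_max_length: int, input_ids_length: int,
                       max_position_embeddings: int | None = None) -> int:
    # By default, budget `default_max_length` (20) new tokens on top of the prompt.
    max_length = default_max_length + input_ids_length
    # Never exceed the model's maximum context window, if known.
    if max_position_embeddings is not None:
        max_length = min(max_length, max_position_embeddings)
    return max_length

# A 30-token prompt keeps the full 20-token generation budget ...
assert resolve_max_length(20, 30) == 50
# ... while a 440-token prompt is clamped at an assumed 448-position context.
assert resolve_max_length(20, 440, max_position_embeddings=448) == 448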