Commit b66082e

Revert "Fix Whisper CI (#34541)"
This reverts commit eb81144.

src/transformers/generation/utils.py

Lines changed: 4 additions & 5 deletions
@@ -1452,11 +1452,10 @@ def _prepare_generated_length(
         ):
             generation_config.max_length -= inputs_tensor.shape[1]
         elif has_default_max_length:  # by default let's always generate 20 new tokens
-            if generation_config.max_length == GenerationConfig().max_length:
-                generation_config.max_length = generation_config.max_length + input_ids_length
-                max_position_embeddings = getattr(self.config, "max_position_embeddings", None)
-                if max_position_embeddings is not None:
-                    generation_config.max_length = min(generation_config.max_length, max_position_embeddings)
+            generation_config.max_length = generation_config.max_length + input_ids_length
+            max_position_embeddings = getattr(self.config, "max_position_embeddings", None)
+            if max_position_embeddings is not None:
+                generation_config.max_length = min(generation_config.max_length, max_position_embeddings)
 
         # same for min length
         if generation_config.min_new_tokens is not None:
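
For context, the guard removed by this revert changed when the default max_length gets offset by the prompt length. The sketch below contrasts the two behaviors; it is a minimal illustration, not the transformers implementation. resolve_max_length, its parameters, and DEFAULT_MAX_LENGTH are hypothetical stand-ins; only the names max_length, input_ids_length, and max_position_embeddings come from the diff above.

# Illustrative sketch only: resolve_max_length and DEFAULT_MAX_LENGTH are
# hypothetical stand-ins, not transformers API.
DEFAULT_MAX_LENGTH = 20  # per the diff comment, the default is 20 new tokens

def resolve_max_length(max_length, input_ids_length,
                       max_position_embeddings=None, guarded=False):
    """Compute the effective max_length when has_default_max_length is true."""
    if guarded and max_length != DEFAULT_MAX_LENGTH:
        # Pre-revert behavior (eb81144): leave max_length untouched unless it
        # still equals the fresh GenerationConfig() default.
        return max_length
    # Post-revert behavior: always offset by the prompt length, then cap at
    # the model's maximum position embeddings when one is defined.
    max_length = max_length + input_ids_length
    if max_position_embeddings is not None:
        max_length = min(max_length, max_position_embeddings)
    return max_length

# Both behaviors agree when max_length is still the library default; they
# diverge only when a different default was inherited from elsewhere:
print(resolve_max_length(20, 100, max_position_embeddings=448))                 # 120
print(resolve_max_length(30, 100, max_position_embeddings=448))                 # 130
print(resolve_max_length(30, 100, max_position_embeddings=448, guarded=True))   # 30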
