src/transformers/generation (1 file changed, +4 −5 lines)

@@ -1452,11 +1452,10 @@ def _prepare_generated_length(
         ):
             generation_config.max_length -= inputs_tensor.shape[1]
         elif has_default_max_length:  # by default let's always generate 20 new tokens
-            if generation_config.max_length == GenerationConfig().max_length:
-                generation_config.max_length = generation_config.max_length + input_ids_length
-                max_position_embeddings = getattr(self.config, "max_position_embeddings", None)
-                if max_position_embeddings is not None:
-                    generation_config.max_length = min(generation_config.max_length, max_position_embeddings)
+            generation_config.max_length = generation_config.max_length + input_ids_length
+            max_position_embeddings = getattr(self.config, "max_position_embeddings", None)
+            if max_position_embeddings is not None:
+                generation_config.max_length = min(generation_config.max_length, max_position_embeddings)

         # same for min length
         if generation_config.min_new_tokens is not None:
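For reference, a minimal sketch of the default-length behavior after this change (the standalone helper name and signature are illustrative assumptions, not the library's API): when max_length is left at its default, the generation length becomes the default budget plus the prompt length, capped at the model's max_position_embeddings when that attribute is present.

from typing import Optional

# Minimal sketch, not the actual transformers implementation: extend the default
# max_length by the prompt length and cap it at the model's positional capacity.
# The function name and signature are illustrative assumptions.
def resolve_default_max_length(
    default_max_length: int,
    input_ids_length: int,
    max_position_embeddings: Optional[int] = None,
) -> int:
    # Default budget (20 new tokens by default) plus the prompt length.
    max_length = default_max_length + input_ids_length
    # Never exceed the model's positional limit when it is known.
    if max_position_embeddings is not None:
        max_length = min(max_length, max_position_embeddings)
    return max_length

# Example: a 100-token prompt with the default of 20 new tokens on a 2048-position model.
assert resolve_default_max_length(20, 100, 2048) == 120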