You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Copy file name to clipboardExpand all lines: bigcode_eval/utils.py
+16-5Lines changed: 16 additions & 5 deletions
Original file line number
Diff line number
Diff line change
@@ -297,11 +297,22 @@ def complete_code(
297
297
**gen_kwargs,
298
298
)
299
299
else:
300
-
generated_tokens=model.generate(
301
-
input_ids=inputs,
302
-
num_return_sequences=batch_size,
303
-
**gen_kwargs,
304
-
)
300
+
# In transformers (>= 4.40.2), if the length of input_ids == max_length, a ValueError is thrown.
301
+
# We want to ignore this error in order to reproduce old results with mbpp.
302
+
try:
303
+
generated_tokens=model.generate(
304
+
input_ids=inputs,
305
+
num_return_sequences=batch_size,
306
+
**gen_kwargs,
307
+
)
308
+
except ValueError as e:
309
+
# When the length of input_ids == max_length, the generation is the same as the input
310
+
if str(e).startswith(f"Input length of input_ids is {inputs.shape[1]}, but `max_length` is set to {gen_kwargs['max_length']}"):
311
+
warnings.warn(f"An error with the following message was thrown: {e}. Returning the input as the generation, for higher scores consider using a larger `max_length`")
0 commit comments