diff --git a/src/transformers/generation/utils.py b/src/transformers/generation/utils.py
index efaafb8f246..7650276c573 100644
--- a/src/transformers/generation/utils.py
+++ b/src/transformers/generation/utils.py
@@ -975,12 +975,12 @@ class GenerationMixin:
         >>> generated_tokens = outputs.sequences[:, input_length:]
         >>> for tok, score in zip(generated_tokens[0], transition_scores[0]):
         ...     # | token | token string | logits | probability
-        ...     print(f"| {tok:5d} | {tokenizer.decode(tok):8s} | {score.numpy():.4f} | {np.exp(score.numpy()):.2%}")
-        |   262 |  the     | -1.4136 | 24.33%
-        |  1110 |  day     | -2.6089 | 7.36%
-        |   618 |  when    | -2.0096 | 13.40%
-        |   356 |  we      | -1.8593 | 15.58%
-        |   460 |  can     | -2.5083 | 8.14%
+        ...     print(f"| {tok:5d} | {tokenizer.decode(tok):8s} | {score.numpy():.3f} | {np.exp(score.numpy()):.2%}")
+        |   262 |  the     | -1.414 | 24.33%
+        |  1110 |  day     | -2.609 | 7.36%
+        |   618 |  when    | -2.010 | 13.40%
+        |   356 |  we      | -1.859 | 15.58%
+        |   460 |  can     | -2.508 | 8.14%

         >>> # Example 2: Reconstruct the sequence scores from Beam Search
         >>> outputs = model.generate(
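For anyone who wants to regenerate the table above locally, below is a minimal standalone sketch of the docstring example this hunk edits. The "gpt2" checkpoint, the "Today is" prompt, and the greedy `max_new_tokens=5` setup are assumptions taken from the surrounding docstring, which is not visible in this hunk. `compute_transition_scores` with `normalize_logits=True` returns log-probabilities, which is why `np.exp` recovers the percentage column.

# Sketch: reproduce the transition-scores table edited above (checkpoint/prompt are assumptions, see note).
import numpy as np
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")   # assumed checkpoint from the full docstring example
model = AutoModelForCausalLM.from_pretrained("gpt2")
tokenizer.pad_token_id = tokenizer.eos_token_id

inputs = tokenizer(["Today is"], return_tensors="pt")   # assumed prompt
outputs = model.generate(
    **inputs, max_new_tokens=5, return_dict_in_generate=True, output_scores=True
)
# With normalize_logits=True the scores are per-token log-probabilities.
transition_scores = model.compute_transition_scores(
    outputs.sequences, outputs.scores, normalize_logits=True
)

input_length = inputs.input_ids.shape[1]
generated_tokens = outputs.sequences[:, input_length:]
for tok, score in zip(generated_tokens[0], transition_scores[0]):
    # | token id | token string | log-probability | probability
    print(f"| {tok:5d} | {tokenizer.decode(tok):8s} | {score.numpy():.3f} | {np.exp(score.numpy()):.2%}")

Within this hunk, the only change is the `:.4f` to `:.3f` format spec, so the probability column stays the same while the logit column is printed with one fewer decimal place.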