Commit

make fixup
Rocketknight1 committed Sep 14, 2023
1 parent: 52d0904 · commit: 0be6f03
Showing 1 changed file with 3 additions and 1 deletion.
src/transformers/pipelines/conversational.py (3 additions, 1 deletion)
@@ -275,7 +275,9 @@ def _forward(self, model_inputs, minimum_tokens=10, **generate_kwargs):
 
         n = model_inputs["input_ids"].shape[1]
         if max_length - minimum_tokens < n:
-            logger.warning(f"Conversation input is too long ({n}), trimming it to {max_length - minimum_tokens} tokens. Consider increasing `max_length` to avoid truncation.")
+            logger.warning(
+                f"Conversation input is too long ({n}), trimming it to {max_length - minimum_tokens} tokens. Consider increasing `max_length` to avoid truncation."
+            )
             trim = max_length - minimum_tokens
             model_inputs["input_ids"] = model_inputs["input_ids"][:, -trim:]
             if "attention_mask" in model_inputs:
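
For context, the warning touched by this commit fires right before the pipeline left-truncates the conversation so that at least `minimum_tokens` of the `max_length` budget remain available for generation. Below is a minimal standalone sketch of that truncation step, not part of the commit itself; the tensor shapes and the `max_length`/`minimum_tokens` values are invented for illustration.

# Minimal sketch (illustrative only): reproduces the left-truncation logic shown
# in the hunk above, with made-up inputs.
import torch

max_length = 20        # total token budget passed to generate
minimum_tokens = 10    # tokens reserved for the model's reply
model_inputs = {
    "input_ids": torch.arange(32).unsqueeze(0),          # shape (1, 32): too long
    "attention_mask": torch.ones(1, 32, dtype=torch.long),
}

n = model_inputs["input_ids"].shape[1]
if max_length - minimum_tokens < n:
    # Keep only the most recent `trim` tokens so the reply still fits in max_length.
    trim = max_length - minimum_tokens
    model_inputs["input_ids"] = model_inputs["input_ids"][:, -trim:]
    if "attention_mask" in model_inputs:
        model_inputs["attention_mask"] = model_inputs["attention_mask"][:, -trim:]

print(model_inputs["input_ids"].shape)  # torch.Size([1, 10]): the last 10 token ids

Truncating from the left keeps the most recent turns of the conversation, which is generally what matters most for producing the next reply.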
