From f32c0e8af3f2843bf70071d3d14a480a6a23da66 Mon Sep 17 00:00:00 2001
From: Purfview <69023953+Purfview@users.noreply.github.com>
Date: Wed, 11 Dec 2024 11:51:38 +0000
Subject: [PATCH] Make batched suppress_tokens behaviour same as in sequential
 (#1194)

---
 faster_whisper/transcribe.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/faster_whisper/transcribe.py b/faster_whisper/transcribe.py
index f389f09..42272ca 100644
--- a/faster_whisper/transcribe.py
+++ b/faster_whisper/transcribe.py
@@ -495,7 +495,11 @@ class BatchedInferencePipeline:
             initial_prompt=initial_prompt,
             prefix=prefix,
             suppress_blank=suppress_blank,
-            suppress_tokens=get_suppressed_tokens(tokenizer, suppress_tokens),
+            suppress_tokens=(
+                get_suppressed_tokens(tokenizer, suppress_tokens)
+                if suppress_tokens
+                else suppress_tokens
+            ),
             prepend_punctuations=prepend_punctuations,
             append_punctuations=append_punctuations,
             max_new_tokens=max_new_tokens,
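
Note: the sketch below illustrates the behavioural difference this patch targets. Before the change, the batched pipeline always ran the user-supplied suppress_tokens through get_suppressed_tokens, whereas the sequential path leaves a falsy value (None or an empty list) untouched so suppression stays disabled. The helper fake_get_suppressed_tokens and the placeholder token ids are hypothetical stand-ins, not the faster_whisper implementation; only the conditional expression in resolve() mirrors the patched code.

# Sketch only: why the patch guards the call with `if suppress_tokens`.
# fake_get_suppressed_tokens is a hypothetical stand-in for faster_whisper's
# get_suppressed_tokens(), which expands the special value -1 into the
# tokenizer's non-speech token list.

NON_SPEECH_TOKENS = [220, 1350, 2604]  # placeholder token ids, not real values


def fake_get_suppressed_tokens(suppress_tokens):
    expanded = {t for t in suppress_tokens if t >= 0}
    if -1 in suppress_tokens:
        expanded.update(NON_SPEECH_TOKENS)
    return sorted(expanded)


def resolve(suppress_tokens):
    # Mirrors the patched expression: expand only when suppress_tokens is
    # truthy, so None (or an empty list) passes through unchanged and
    # suppression stays off, matching the sequential pipeline.
    return (
        fake_get_suppressed_tokens(suppress_tokens)
        if suppress_tokens
        else suppress_tokens
    )


print(resolve([-1]))  # expanded list of token ids
print(resolve(None))  # None -> suppression disabled, as in the sequential path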