Make batched suppress_tokens behaviour the same as in the sequential pipeline (#1194)

This commit is contained in:
Purfview
2024-12-11 11:51:38 +00:00
committed by GitHub
parent 8327d8cc64
commit f32c0e8af3

View File

@@ -495,7 +495,11 @@ class BatchedInferencePipeline:
initial_prompt=initial_prompt,
prefix=prefix,
suppress_blank=suppress_blank,
suppress_tokens=get_suppressed_tokens(tokenizer, suppress_tokens),
suppress_tokens=(
get_suppressed_tokens(tokenizer, suppress_tokens)
if suppress_tokens
else suppress_tokens
),
prepend_punctuations=prepend_punctuations,
append_punctuations=append_punctuations,
max_new_tokens=max_new_tokens,