From e2094b647497e18d1af4d5abfb2f9688d6166e35 Mon Sep 17 00:00:00 2001
From: Guillaume Klein
Date: Fri, 17 Feb 2023 14:37:24 +0100
Subject: [PATCH] Reduce the maximum length when the prompt is longer than
 448/2

---
 faster_whisper/transcribe.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/faster_whisper/transcribe.py b/faster_whisper/transcribe.py
index f589d85..8b9522d 100644
--- a/faster_whisper/transcribe.py
+++ b/faster_whisper/transcribe.py
@@ -295,6 +295,7 @@ class WhisperModel:
         features = self.get_input(segment)
         result = None
         final_temperature = None
+        max_length = min(self.max_length, 2 * (self.max_length - len(prompt)))
 
         for temperature in options.temperatures:
             if temperature > 0:
@@ -314,7 +315,7 @@ class WhisperModel:
                 result = self.model.generate(
                     features,
                     [prompt],
-                    max_length=self.max_length,
+                    max_length=max_length,
                     return_scores=True,
                     return_no_speech_prob=True,
                     **kwargs,