Reduce the maximum length when the prompt is longer than 448/2
@@ -295,6 +295,7 @@ class WhisperModel:
         features = self.get_input(segment)
         result = None
         final_temperature = None
+        max_length = min(self.max_length, 2 * (self.max_length - len(prompt)))
 
         for temperature in options.temperatures:
             if temperature > 0:
@@ -314,7 +315,7 @@ class WhisperModel:
             result = self.model.generate(
                 features,
                 [prompt],
-                max_length=self.max_length,
+                max_length=max_length,
                 return_scores=True,
                 return_no_speech_prob=True,
                 **kwargs,
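For reference, a minimal sketch of how the new cap behaves, assuming the decoder's hard limit (self.max_length) is 448 tokens; the helper name and the printed values below are illustrative, not part of the commit:

    # Illustrative only: mirrors the expression added in this commit,
    # assuming a 448-token decoder limit (self.max_length).
    MAX_LENGTH = 448

    def capped_max_length(prompt_len, hard_limit=MAX_LENGTH):
        # If the prompt uses more than half the window (448/2 tokens),
        # shrink the generation budget so prompt + output still fit.
        return min(hard_limit, 2 * (hard_limit - prompt_len))

    print(capped_max_length(10))   # 448 -> min(448, 876), cap unchanged
    print(capped_max_length(224))  # 448 -> exactly at the 448/2 threshold
    print(capped_max_length(300))  # 296 -> min(448, 296), cap reduced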