Rename clear_previous_text_on_temperature argument (#398)
`prompt_reset_on_temperature` makes it clearer what the argument does.
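For callers, only the keyword name changes. A minimal usage sketch with the renamed argument (the model size and audio path below are placeholders, not part of this commit):

from faster_whisper import WhisperModel

model = WhisperModel("small")  # placeholder model size

# Reset the running prompt whenever a window had to fall back to a decoding
# temperature above 0.5; this only matters while condition_on_previous_text=True.
segments, info = model.transcribe(
    "audio.wav",  # placeholder path
    condition_on_previous_text=True,
    prompt_reset_on_temperature=0.5,
)

for segment in segments:
    print(segment.start, segment.end, segment.text)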
@@ -51,7 +51,7 @@ class TranscriptionOptions(NamedTuple):
     no_speech_threshold: Optional[float]
     compression_ratio_threshold: Optional[float]
     condition_on_previous_text: bool
-    clear_previous_text_on_temperature: float
+    prompt_reset_on_temperature: float
     temperatures: List[float]
     initial_prompt: Optional[Union[str, Iterable[int]]]
     prefix: Optional[str]
@@ -172,7 +172,7 @@ class WhisperModel:
         log_prob_threshold: Optional[float] = -1.0,
         no_speech_threshold: Optional[float] = 0.6,
         condition_on_previous_text: bool = True,
-        clear_previous_text_on_temperature: float = 0.5,
+        prompt_reset_on_temperature: float = 0.5,
         initial_prompt: Optional[Union[str, Iterable[int]]] = None,
         prefix: Optional[str] = None,
         suppress_blank: bool = True,
@@ -211,8 +211,8 @@ class WhisperModel:
             as a prompt for the next window; disabling may make the text inconsistent across
             windows, but the model becomes less prone to getting stuck in a failure loop,
             such as repetition looping or timestamps going out of sync.
-          clear_previous_text_on_temperature: If the temperature is above this value,
-            clear the previous text.
+          prompt_reset_on_temperature: Resets prompt if temperature is above this value.
+            Arg has effect only if condition_on_previous_text is True.
           initial_prompt: Optional text string or iterable of token ids to provide as a
             prompt for the first window.
           prefix: Optional text to provide as a prefix for the first window.
@@ -323,7 +323,7 @@ class WhisperModel:
             no_speech_threshold=no_speech_threshold,
             compression_ratio_threshold=compression_ratio_threshold,
             condition_on_previous_text=condition_on_previous_text,
-            clear_previous_text_on_temperature=clear_previous_text_on_temperature,
+            prompt_reset_on_temperature=prompt_reset_on_temperature,
             temperatures=(
                 temperature if isinstance(temperature, (list, tuple)) else [temperature]
             ),
@@ -566,7 +566,7 @@ class WhisperModel:
 
             if (
                 not options.condition_on_previous_text
-                or temperature > options.clear_previous_text_on_temperature
+                or temperature > options.prompt_reset_on_temperature
             ):
                 prompt_reset_since = len(all_tokens)
 
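The rename does not change behaviour: the option still gates the prompt reset shown in the hunk above. A minimal standalone sketch of that decision (the helper name and the sample values are illustrative, not library code):

def should_reset_prompt(
    condition_on_previous_text: bool,
    temperature: float,
    prompt_reset_on_temperature: float,
) -> bool:
    # Mirrors the condition above: the accumulated prompt is dropped either
    # because previous text is not fed back at all, or because the window had
    # to fall back to a decoding temperature above the threshold.
    return (
        not condition_on_previous_text
        or temperature > prompt_reset_on_temperature
    )

# Illustrative values with the default threshold of 0.5: a window decoded at
# temperature 0.8 resets the prompt, one decoded at 0.2 keeps it, and the
# prompt is always reset when condition_on_previous_text is False.
assert should_reset_prompt(True, 0.8, 0.5) is True
assert should_reset_prompt(True, 0.2, 0.5) is False
assert should_reset_prompt(False, 0.0, 0.5) is True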