"""Public package interface for faster-whisper.

Re-exports the user-facing API so callers can simply do
``from faster_whisper import WhisperModel`` instead of importing
from the internal submodules.
"""

from faster_whisper.audio import decode_audio
from faster_whisper.transcribe import BatchedInferencePipeline, WhisperModel
from faster_whisper.utils import available_models, download_model, format_timestamp
from faster_whisper.version import __version__

# Explicit public API: anything not listed here is internal.
__all__ = [
    "available_models",
    "decode_audio",
    "WhisperModel",
    "BatchedInferencePipeline",
    "download_model",
    "format_timestamp",
    "__version__",
]