from modules.utils.paths import *
from modules.whisper.whisper_factory import WhisperFactory
from modules.whisper.data_classes import *
from test_config import *
from test_transcription import download_file, test_transcribe
import gradio as gr
import pytest
import torch
import os


# GPU-only check: BGM separation enabled, VAD filtering and diarization disabled,
# run once per Whisper implementation.
@pytest.mark.skipif(
    not is_cuda_available(),
    reason="Skipping because the test only works on GPU"
)
@pytest.mark.parametrize(
    "whisper_type,vad_filter,bgm_separation,diarization",
    [
        (WhisperImpl.WHISPER.value, False, True, False),
        (WhisperImpl.FASTER_WHISPER.value, False, True, False),
        (WhisperImpl.INSANELY_FAST_WHISPER.value, False, True, False)
    ]
)
def test_bgm_separation_pipeline(
    whisper_type: str,
    vad_filter: bool,
    bgm_separation: bool,
    diarization: bool,
):
    test_transcribe(whisper_type, vad_filter, bgm_separation, diarization)


# GPU-only check: BGM separation and VAD filtering enabled together,
# diarization disabled, run once per Whisper implementation.
@pytest.mark.skipif(
    not is_cuda_available(),
    reason="Skipping because the test only works on GPU"
)
@pytest.mark.parametrize(
    "whisper_type,vad_filter,bgm_separation,diarization",
    [
        (WhisperImpl.WHISPER.value, True, True, False),
        (WhisperImpl.FASTER_WHISPER.value, True, True, False),
        (WhisperImpl.INSANELY_FAST_WHISPER.value, True, True, False)
    ]
)
def test_bgm_separation_with_vad_pipeline(
    whisper_type: str,
    vad_filter: bool,
    bgm_separation: bool,
    diarization: bool,
):
    test_transcribe(whisper_type, vad_filter, bgm_separation, diarization)
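
# Minimal usage sketch (assumptions: this module lives at tests/test_bgm_separation.py
# in the repository and a CUDA-capable GPU is available; otherwise both tests are
# skipped by the skipif markers above). Both tests delegate to the shared
# test_transcribe helper imported from test_transcription:
#   pytest tests/test_bgm_separation.py -k "bgm_separation" -v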