Fix warnings

This commit is contained in:
PiotrBLL 2024-11-09 01:20:02 +01:00
parent 2d5a552596
commit 50adb7654b

View File

@ -125,8 +125,6 @@ def transcribe(
if dtype == torch.float16:
    warnings.warn("FP16 is not supported on CPU; using FP32 instead")
    dtype = torch.float32
if model.device == torch.device("hpu") and torch.hpu.is_available():
warnings.warn("Performing inference on HPU when CUDA is available")
if dtype == torch.float32:
    decode_options["fp16"] = False