import warnings

import torch


def load_default_hpu() -> str:
    """
    Return the name of the preferred available device.

    Prefers "hpu" when Habana (Intel Gaudi) support is installed and an HPU
    is available; otherwise warns and falls back to "cuda" if available,
    else "cpu".

    Returns:
        str: One of "hpu", "cuda", or "cpu".
    """
    # ``torch.hpu`` only exists when the Habana PyTorch plugin
    # (habana_frameworks.torch) has been imported/installed; touching it
    # unconditionally raises AttributeError on stock PyTorch builds, so
    # probe defensively before calling ``is_available()``.
    hpu = getattr(torch, "hpu", None)
    if hpu is None or not hpu.is_available():
        warnings.warn("HPU is not available; trying to use CUDA instead.")
        return "cuda" if torch.cuda.is_available() else "cpu"

    return "hpu"