@@ -94,9 +94,14 @@ def load_model(name: str, device: Optional[Union[str, torch.device]] = None, dow
     if device is None:
         device = "cuda" if torch.cuda.is_available() else "cpu"
     if download_root is None:
-        download_root = os.getenv(
-            "XDG_CACHE_HOME",
-            os.path.join(os.path.expanduser("~"), ".cache", "whisper")
+        download_root = os.path.join(
+            os.getenv(
+                "XDG_CACHE_HOME",
+                os.path.join(
+                    os.path.expanduser("~"), ".cache"
+                )
+            ),
+            "whisper"
         )

     if name in _MODELS: