Qing
2023-12-24 15:32:27 +08:00
parent 0e5e16ba20
commit 371db2d771
31 changed files with 441 additions and 439 deletions


@@ -1,10 +1,16 @@
# https://github.com/huggingface/huggingface_hub/blob/5a12851f54bf614be39614034ed3a9031922d297/src/huggingface_hub/utils/_runtime.py
import os
import platform
import sys
from pathlib import Path
import packaging.version
from loguru import logger
from rich import print
from typing import Dict, Any
from lama_cleaner.const import Device
_PY_VERSION: str = sys.version.split()[0].rstrip("+")
if packaging.version.Version(_PY_VERSION) < packaging.version.Version("3.8.0"):
@@ -21,7 +27,6 @@ _CANDIDATES = [
"diffusers",
"transformers",
"opencv-python",
"xformers",
"accelerate",
"lama-cleaner",
"rembg",
@@ -38,7 +43,7 @@ for name in _CANDIDATES:
def dump_environment_info() -> Dict[str, str]:
-    """Dump information about the machine to help debugging issues. """
+    """Dump information about the machine to help debugging issues."""
    # Generic machine info
    info: Dict[str, Any] = {
@@ -48,3 +53,34 @@ def dump_environment_info() -> Dict[str, str]:
    info.update(_package_versions)
    print("\n".join([f"- {prop}: {val}" for prop, val in info.items()]) + "\n")
    return info
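
A minimal usage sketch of the environment dump; the module path `lama_cleaner.runtime` is an assumption based on where this file appears to live and is not shown in this diff:

```python
# Hypothetical usage sketch -- the import path is an assumption, not confirmed by this diff.
from lama_cleaner.runtime import dump_environment_info

if __name__ == "__main__":
    # Prints one "- key: value" line per field (Python/OS/package versions)
    # and returns the same data as a dict for programmatic checks.
    env_info = dump_environment_info()
    print(f"{len(env_info)} environment fields collected")
```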
def check_device(device: Device) -> Device:
    if device == Device.cuda:
        import platform
        if platform.system() == "Darwin":
            logger.warning("MacOS does not support cuda, use cpu instead")
            return Device.cpu
        else:
            import torch
            if not torch.cuda.is_available():
                logger.warning("CUDA is not available, use cpu instead")
                return Device.cpu
    elif device == Device.mps:
        import torch
        if not torch.backends.mps.is_available():
            logger.warning("mps is not available, use cpu instead")
            return Device.cpu
    return device
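
A hedged sketch of how `check_device` might be used to resolve a requested device before constructing a `torch.device`; the import paths and the assumption that `Device` is a string-valued enum ("cpu"/"cuda"/"mps") are mine, not guaranteed by this diff:

```python
# Hypothetical usage sketch -- import paths and Device's string values are assumptions.
import torch

from lama_cleaner.const import Device
from lama_cleaner.runtime import check_device

requested = Device.cuda
resolved = check_device(requested)  # falls back to Device.cpu on macOS or when CUDA/MPS is unavailable
torch_device = torch.device(resolved.value)  # assumes Device members wrap "cpu"/"cuda"/"mps" strings
print(f"requested={requested}, using {torch_device}")
```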
def setup_model_dir(model_dir: Path):
    model_dir = model_dir.expanduser().absolute()
    os.environ["U2NET_HOME"] = str(model_dir)
    os.environ["XDG_CACHE_HOME"] = str(model_dir)
    if not model_dir.exists():
        logger.info(f"Create model directory: {model_dir}")
        model_dir.mkdir(exist_ok=True, parents=True)
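
Finally, a sketch of calling `setup_model_dir` early in startup so that later downloads from libraries honoring `U2NET_HOME` or `XDG_CACHE_HOME` land under the chosen directory; the example path and the import are illustrative assumptions:

```python
# Hypothetical usage sketch -- call this before any model-download code reads these env vars.
import os
from pathlib import Path

from lama_cleaner.runtime import setup_model_dir

setup_model_dir(Path("~/custom-models"))  # expanded to an absolute path and created if missing
print(os.environ["U2NET_HOME"], os.environ["XDG_CACHE_HOME"])  # both now point at the same directory
```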