IOPaint/iopaint/runtime.py
let5sne 1b87a98261 🎨 Complete IOPaint project update
## Main Updates
- Update all dependencies to the latest stable versions
- 📝 Add detailed project documentation and model recommendations
- 🔧 Configure VSCode Cloud Studio preview support
- 🐛 Fix PyTorch API deprecation warnings

## Dependency Updates
- diffusers: 0.27.2 → 0.35.2
- gradio: 4.21.0 → 5.46.0
- peft: 0.7.1 → 0.18.0
- Pillow: 9.5.0 → 11.3.0
- fastapi: 0.108.0 → 0.116.2
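
A quick way to confirm that an upgraded environment actually matches these bumps is the same `importlib.metadata` pattern used by `iopaint/runtime.py` below. This is a sketch for illustration only: the `EXPECTED_MINIMUMS` mapping simply mirrors the list above and is not part of the repository.

```python
# Illustrative post-upgrade check, not part of the IOPaint codebase.
from importlib.metadata import version, PackageNotFoundError
from packaging.version import Version

EXPECTED_MINIMUMS = {
    "diffusers": "0.35.2",
    "gradio": "5.46.0",
    "peft": "0.18.0",
    "Pillow": "11.3.0",
    "fastapi": "0.116.2",
}

for name, minimum in EXPECTED_MINIMUMS.items():
    try:
        installed = version(name)
    except PackageNotFoundError:
        print(f"- {name}: not installed (expected >= {minimum})")
        continue
    status = "ok" if Version(installed) >= Version(minimum) else "too old"
    print(f"- {name}: {installed} ({status}, expected >= {minimum})")
```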

## New Files
- CLAUDE.md - project architecture and development guide
- UPGRADE_NOTES.md - detailed upgrade notes
- .vscode/preview.yml - preview configuration
- .vscode/LAUNCH_GUIDE.md - launch guide
- .gitignore - updated ignore rules

## Code Fixes
- Fix the torch.cuda.amp.autocast() deprecation warning in iopaint/model/ldm.py (see the sketch below)
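
The commit does not show the patch itself, but the warning in question comes from PyTorch deprecating `torch.cuda.amp.autocast()` in favor of the unified `torch.amp.autocast("cuda")` API. A minimal before/after sketch; the tensor and the body of the `with` block are placeholders, not IOPaint code:

```python
import torch

x = torch.randn(1, 3, 64, 64)

# Deprecated spelling: emits a FutureWarning on recent PyTorch releases.
# with torch.cuda.amp.autocast():
#     y = run_model(x)

# Current spelling: pass the device type to the unified torch.amp API.
# (If CUDA is unavailable, autocast is disabled with a warning.)
with torch.amp.autocast("cuda"):
    y = x * 2  # placeholder for the real model call
```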

## Documentation Updates
- README.md - add model recommendations and usage guide
- Complete project source code (iopaint/)
- Web frontend source code (web_app/)

🤖 Generated with Claude Code
2025-11-28 17:10:24 +00:00

88 lines · 2.4 KiB · Python

# https://github.com/huggingface/huggingface_hub/blob/5a12851f54bf614be39614034ed3a9031922d297/src/huggingface_hub/utils/_runtime.py
import os
import platform
import sys
from pathlib import Path

import packaging.version
from iopaint.schema import Device
from loguru import logger
from rich import print
from typing import Dict, Any

_PY_VERSION: str = sys.version.split()[0].rstrip("+")
if packaging.version.Version(_PY_VERSION) < packaging.version.Version("3.8.0"):
    import importlib_metadata  # type: ignore
else:
    import importlib.metadata as importlib_metadata  # type: ignore

_package_versions = {}
_CANDIDATES = [
    "torch",
    "torchvision",
    "Pillow",
    "diffusers",
    "transformers",
    "opencv-python",
    "accelerate",
    "iopaint",
    "rembg",
    "onnxruntime",
]
# Check once at runtime
for name in _CANDIDATES:
    _package_versions[name] = "N/A"
    try:
        _package_versions[name] = importlib_metadata.version(name)
    except importlib_metadata.PackageNotFoundError:
        pass


def dump_environment_info() -> Dict[str, str]:
    """Dump information about the machine to help debugging issues."""
    # Generic machine info
    info: Dict[str, Any] = {
        "Platform": platform.platform(),
        "Python version": platform.python_version(),
    }
    info.update(_package_versions)
    print("\n".join([f"- {prop}: {val}" for prop, val in info.items()]) + "\n")
    return info


def check_device(device: Device) -> Device:
    if device == Device.cuda:
        import platform

        if platform.system() == "Darwin":
            logger.warning("MacOS does not support cuda, use cpu instead")
            return Device.cpu
        else:
            import torch

            if not torch.cuda.is_available():
                logger.warning("CUDA is not available, use cpu instead")
                return Device.cpu
    elif device == Device.mps:
        import torch

        if not torch.backends.mps.is_available():
            logger.warning("mps is not available, use cpu instead")
            return Device.cpu
    return device


def setup_model_dir(model_dir: Path):
    model_dir = model_dir.expanduser().absolute()
    logger.info(f"Model directory: {model_dir}")
    os.environ["U2NET_HOME"] = str(model_dir)
    os.environ["XDG_CACHE_HOME"] = str(model_dir)
    if not model_dir.exists():
        logger.info(f"Create model directory: {model_dir}")
        model_dir.mkdir(exist_ok=True, parents=True)
    return model_dir