## Main Updates

- ✨ Updated all dependencies to the latest stable versions
- 📝 Added detailed project documentation and model recommendations
- 🔧 Configured the VSCode Cloud Studio preview feature
- 🐛 Fixed PyTorch API deprecation warnings

## Dependency Updates

- diffusers: 0.27.2 → 0.35.2
- gradio: 4.21.0 → 5.46.0
- peft: 0.7.1 → 0.18.0
- Pillow: 9.5.0 → 11.3.0
- fastapi: 0.108.0 → 0.116.2

## New Files

- CLAUDE.md - project architecture and development guide
- UPGRADE_NOTES.md - detailed upgrade notes
- .vscode/preview.yml - preview configuration
- .vscode/LAUNCH_GUIDE.md - launch guide
- .gitignore - updated ignore rules

## Code Fixes

- Fixed the torch.cuda.amp.autocast() deprecation warning in iopaint/model/ldm.py (see the sketch below)

## Documentation Updates

- README.md - added model recommendations and a usage guide
- Full project source code (iopaint/)
- Web frontend source code (web_app/)

🤖 Generated with Claude Code
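A minimal sketch of the kind of change the autocast fix implies, assuming a standard call site; the actual code in iopaint/model/ldm.py may differ, and the model and tensor here are hypothetical stand-ins:

```python
import torch
import torch.nn as nn

model = nn.Linear(8, 8).cuda()        # hypothetical stand-in for the real model
x = torch.randn(4, 8, device="cuda")

# Before (deprecated since PyTorch 2.4, emits a FutureWarning):
#   with torch.cuda.amp.autocast():
#       out = model(x)

# After: the device-agnostic torch.amp namespace takes the device type
# as its first argument.
with torch.amp.autocast("cuda"):
    out = model(x)
```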
```python
import torch
import torch.nn as nn
import torch.nn.functional as F


class Hswish(nn.Module):
    """Hard-swish: x * relu6(x + 3) / 6."""

    def __init__(self, inplace=True):
        super(Hswish, self).__init__()
        self.inplace = inplace

    def forward(self, x):
        return x * F.relu6(x + 3., inplace=self.inplace) / 6.


# out = max(0, min(1, slope * x + offset))
# paddle.fluid.layers.hard_sigmoid(x, slope=0.2, offset=0.5, name=None)
class Hsigmoid(nn.Module):
    """Hard-sigmoid following Paddle's formulation rather than torch's.

    torch:  F.relu6(x + 3.) / 6.           (slope 1/6)
    paddle: F.relu6(1.2 * x + 3.) / 6.     (slope 0.2, offset 0.5)
    """

    def __init__(self, inplace=True):
        super(Hsigmoid, self).__init__()
        self.inplace = inplace

    def forward(self, x):
        return F.relu6(1.2 * x + 3., inplace=self.inplace) / 6.


class GELU(nn.Module):
    def __init__(self, inplace=True):
        super(GELU, self).__init__()
        # Kept for interface consistency; F.gelu has no in-place variant,
        # so the flag is ignored.
        self.inplace = inplace

    def forward(self, x):
        return F.gelu(x)


class Swish(nn.Module):
    def __init__(self, inplace=True):
        super(Swish, self).__init__()
        self.inplace = inplace

    def forward(self, x):
        if self.inplace:
            # torch.sigmoid(x) is evaluated on the original values before
            # the in-place multiply mutates x.
            x.mul_(torch.sigmoid(x))
            return x
        else:
            return x * torch.sigmoid(x)


class Activation(nn.Module):
    """Factory module that maps an activation name to its implementation."""

    def __init__(self, act_type, inplace=True):
        super(Activation, self).__init__()
        act_type = act_type.lower()
        if act_type == 'relu':
            self.act = nn.ReLU(inplace=inplace)
        elif act_type == 'relu6':
            self.act = nn.ReLU6(inplace=inplace)
        elif act_type == 'sigmoid':
            raise NotImplementedError
        elif act_type == 'hard_sigmoid':
            self.act = Hsigmoid(inplace)
        elif act_type == 'hard_swish':
            self.act = Hswish(inplace=inplace)
        elif act_type == 'leakyrelu':
            self.act = nn.LeakyReLU(inplace=inplace)
        elif act_type == 'gelu':
            self.act = GELU(inplace=inplace)
        elif act_type == 'swish':
            self.act = Swish(inplace=inplace)
        else:
            raise NotImplementedError

    def forward(self, inputs):
        return self.act(inputs)
```
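A short usage sketch for the Activation factory above; the tensor shape is illustrative:

```python
import torch

act = Activation('hard_swish')        # resolves to the Hswish module
x = torch.randn(1, 16, 32, 32)
y = act(x)                            # element-wise, shape preserved
assert y.shape == x.shape

# The Paddle-style hard_sigmoid keeps offset 0.5 at x = 0:
hs = Activation('hard_sigmoid')
print(hs(torch.zeros(1)))             # tensor([0.5000])
```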