update
@@ -6,8 +6,6 @@ import warnings

warnings.simplefilter("ignore", UserWarning)

from lama_cleaner.parse_args import parse_args


def entry_point():
    # To make os.environ["XDG_CACHE_HOME"] = args.model_cache_dir works for diffusers
@@ -23,14 +23,6 @@ AVAILABLE_MODELS = [
    "fcf",
    "manga",
    "cv2",
    "sd1.5",
    "anything4",
    "realisticVision1.4",
    "sd2",
    "sdxl",
    "paint_by_example",
    "instruct_pix2pix",
    "kandinsky2.2",
]
DIFFUSERS_MODEL_FP16_REVERSION = [
    "runwayml/stable-diffusion-inpainting",
@@ -41,27 +41,6 @@ def folder_name_to_show_name(name: str) -> str:
    return name.replace("models--", "").replace("--", "/")


def scan_diffusers_models(
    cache_dir, class_name: List[str], model_type: ModelType
) -> List[ModelInfo]:
    cache_dir = Path(cache_dir)
    res = []
    for it in cache_dir.glob("**/*/model_index.json"):
        with open(it, "r", encoding="utf-8") as f:
            data = json.load(f)
            if data["_class_name"] in class_name:
                name = folder_name_to_show_name(it.parent.parent.parent.name)
                if name not in res:
                    res.append(
                        ModelInfo(
                            name=name,
                            path=name,
                            model_type=model_type,
                        )
                    )
    return res


def scan_single_file_diffusion_models(cache_dir) -> List[ModelInfo]:
    cache_dir = Path(cache_dir)
    res = []
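As a side note on folder_name_to_show_name above: the huggingface_hub cache stores each repo in a directory named models--{org}--{repo}, so the helper is expected to round-trip a cache folder name back to a repo id. A minimal sketch, not part of the commit:

# Minimal sketch (assumed cache folder naming, not taken from the diff)
def folder_name_to_show_name(name: str) -> str:
    return name.replace("models--", "").replace("--", "/")

assert (
    folder_name_to_show_name("models--runwayml--stable-diffusion-inpainting")
    == "runwayml/stable-diffusion-inpainting"
)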
@@ -111,7 +90,6 @@ def scan_models() -> List[ModelInfo]:
    available_models = []
    available_models.extend(scan_inpaint_models())
    available_models.extend(scan_single_file_diffusion_models(DEFAULT_MODEL_DIR))

    cache_dir = Path(DIFFUSERS_CACHE)
    diffusers_model_names = []
    for it in cache_dir.glob("**/*/model_index.json"):
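For context on the model_index.json loops above: a diffusers pipeline repo ships a model_index.json whose _class_name field names the pipeline class, and the huggingface_hub cache lays repos out as models--{org}--{repo}/snapshots/{revision}/, which is why three .parent hops recover the repo folder. A rough, self-contained sketch (the path is hypothetical):

import json
from pathlib import Path

# Hypothetical cache entry following the huggingface_hub layout:
# <cache>/models--{org}--{repo}/snapshots/{revision}/model_index.json
index_file = Path(
    "models--runwayml--stable-diffusion-inpainting/snapshots/abc123/model_index.json"
)
data = json.loads(index_file.read_text(encoding="utf-8"))
if data["_class_name"] == "StableDiffusionInpaintPipeline":
    repo_folder = index_file.parent.parent.parent.name  # "models--runwayml--..."
    print(repo_folder)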
@@ -65,5 +65,5 @@ class Kandinsky(DiffusionInpaintModel):


class Kandinsky22(Kandinsky):
    name = "kandinsky2.2"
    name = "kandinsky-community/kandinsky-2-2-decoder-inpaint"
    model_id_or_path = "kandinsky-community/kandinsky-2-2-decoder-inpaint"
@@ -38,12 +38,6 @@ class PaintByExample(DiffusionInpaintModel):
        else:
            self.model = self.model.to(device)

    @staticmethod
    def download():
        from diffusers import DiffusionPipeline

        DiffusionPipeline.from_pretrained("Fantasy-Studio/Paint-by-Example")

    def forward(self, image, mask, config: Config):
        """Input image and output image have same size
        image: [H, W, C] RGB
@@ -22,20 +22,11 @@ class ModelManager:
        self.sd_controlnet_method = ""
        self.model = self.init_model(name, device, **kwargs)

    def _map_old_name(self, name: str) -> str:
        for old_name, model_cls in models.items():
            if name == old_name and hasattr(model_cls, "model_id_or_path"):
                name = model_cls.model_id_or_path
                break
        return name

    @property
    def current_model(self) -> Dict:
        name = self._map_old_name(self.name)
        return self.available_models[name].model_dump()

    def init_model(self, name: str, device, **kwargs):
        name = self._map_old_name(name)
        logger.info(f"Loading model: {name}")
        if name not in self.available_models:
            raise NotImplementedError(f"Unsupported model: {name}")
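Taken together with the Kandinsky22 rename above, _map_old_name keeps old short names working by resolving them to a model class's model_id_or_path. A small standalone illustration, assuming a models dict that maps short names to model classes as in the manager (the real dict lives in lama_cleaner):

# Standalone illustration, not the project's actual registry
class Kandinsky22:
    name = "kandinsky-community/kandinsky-2-2-decoder-inpaint"
    model_id_or_path = "kandinsky-community/kandinsky-2-2-decoder-inpaint"


models = {"kandinsky2.2": Kandinsky22}


def map_old_name(name: str) -> str:
    for old_name, model_cls in models.items():
        if name == old_name and hasattr(model_cls, "model_id_or_path"):
            return model_cls.model_id_or_path
    return name


print(map_old_name("kandinsky2.2"))  # kandinsky-community/kandinsky-2-2-decoder-inpaint
print(map_old_name("lama"))          # unknown names pass through unchanged -> "lama"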
@@ -1,257 +0,0 @@
import os
import imghdr
import argparse
from pathlib import Path

from loguru import logger

from lama_cleaner.const import *
from lama_cleaner.download import cli_download_model, scan_models
from lama_cleaner.runtime import dump_environment_info

DOWNLOAD_SUBCOMMAND = "download"


def download_parse_args(parser):
    subparsers = parser.add_subparsers(dest="subcommand")
    subparser = subparsers.add_parser(DOWNLOAD_SUBCOMMAND, help="Download models")
    subparser.add_argument(
        "--model", help="Erase model name(lama/mat...) or model id on huggingface"
    )
    subparser.add_argument(
        "--model-dir", type=str, default=DEFAULT_MODEL_DIR, help=MODEL_DIR_HELP
    )

def parse_args():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    download_parse_args(parser)

    parser.add_argument("--host", default="127.0.0.1")
    parser.add_argument("--port", default=8080, type=int)

    parser.add_argument(
        "--config-installer",
        action="store_true",
        help="Open config web page, mainly for windows installer",
    )
    parser.add_argument(
        "--load-installer-config",
        action="store_true",
        help="Load all cmd args from installer config file",
    )
    parser.add_argument(
        "--installer-config", default=None, help="Config file for windows installer"
    )

    parser.add_argument(
        "--model",
        default=DEFAULT_MODEL,
        help=f"Available models: [{', '.join(AVAILABLE_MODELS)}], or model id on huggingface",
    )
    parser.add_argument("--no-half", action="store_true", help=NO_HALF_HELP)
    parser.add_argument("--cpu-offload", action="store_true", help=CPU_OFFLOAD_HELP)
    parser.add_argument("--disable-nsfw", action="store_true", help=DISABLE_NSFW_HELP)
    parser.add_argument(
        "--sd-cpu-textencoder", action="store_true", help=CPU_TEXTENCODER_HELP
    )
    parser.add_argument("--sd-controlnet", action="store_true", help=SD_CONTROLNET_HELP)
    parser.add_argument(
        "--sd-controlnet-method",
        default=DEFAULT_SD_CONTROLNET_METHOD,
        choices=SD_CONTROLNET_CHOICES,
    )
    parser.add_argument(
        "--local-files-only", action="store_true", help=LOCAL_FILES_ONLY_HELP
    )
    parser.add_argument(
        "--device", default=DEFAULT_DEVICE, type=str, choices=AVAILABLE_DEVICES
    )
    parser.add_argument("--gui", action="store_true", help=GUI_HELP)
    parser.add_argument(
        "--gui-size",
        default=[1600, 1000],
        nargs=2,
        type=int,
        help="Set window size for GUI",
    )
    parser.add_argument("--input", type=str, default=None, help=INPUT_HELP)
    parser.add_argument("--output-dir", type=str, default=None, help=OUTPUT_DIR_HELP)
    parser.add_argument(
        "--model-dir", type=str, default=DEFAULT_MODEL_DIR, help=MODEL_DIR_HELP
    )
    parser.add_argument(
        "--disable-model-switch",
        action="store_true",
        help="Disable model switch in frontend",
    )
    parser.add_argument(
        "--quality",
        default=95,
        type=int,
        help=QUALITY_HELP,
    )

    # Plugins
    parser.add_argument(
        "--enable-interactive-seg",
        action="store_true",
        help=INTERACTIVE_SEG_HELP,
    )
    parser.add_argument(
        "--interactive-seg-model",
        default="vit_l",
        choices=AVAILABLE_INTERACTIVE_SEG_MODELS,
        help=INTERACTIVE_SEG_MODEL_HELP,
    )
    parser.add_argument(
        "--interactive-seg-device",
        default="cpu",
        choices=AVAILABLE_INTERACTIVE_SEG_DEVICES,
    )
    parser.add_argument(
        "--enable-remove-bg",
        action="store_true",
        help=REMOVE_BG_HELP,
    )
    parser.add_argument(
        "--enable-anime-seg",
        action="store_true",
        help=ANIMESEG_HELP,
    )
    parser.add_argument(
        "--enable-realesrgan",
        action="store_true",
        help=REALESRGAN_HELP,
    )
    parser.add_argument(
        "--realesrgan-device",
        default="cpu",
        type=str,
        choices=REALESRGAN_AVAILABLE_DEVICES,
    )
    parser.add_argument(
        "--realesrgan-model",
        default=RealESRGANModelName.realesr_general_x4v3.value,
        type=str,
        choices=RealESRGANModelNameList,
    )
    parser.add_argument(
        "--realesrgan-no-half",
        action="store_true",
        help="Disable half precision for RealESRGAN",
    )
    parser.add_argument("--enable-gfpgan", action="store_true", help=GFPGAN_HELP)
    parser.add_argument(
        "--gfpgan-device", default="cpu", type=str, choices=GFPGAN_AVAILABLE_DEVICES
    )
    parser.add_argument(
        "--enable-restoreformer", action="store_true", help=RESTOREFORMER_HELP
    )
    parser.add_argument(
        "--restoreformer-device",
        default="cpu",
        type=str,
        choices=RESTOREFORMER_AVAILABLE_DEVICES,
    )
    parser.add_argument(
        "--install-plugins-package",
        action="store_true",
    )
    #########

    args = parser.parse_args()
    # collect system info to help debug
    dump_environment_info()
    if args.subcommand == DOWNLOAD_SUBCOMMAND:
        cli_download_model(args.model, args.model_dir)
        return

    if args.install_plugins_package:
        from lama_cleaner.installer import install_plugins_package

        install_plugins_package()
        exit()

    if args.config_installer:
        if args.installer_config is None:
            parser.error(
                "args.config_installer==True, must set args.installer_config to store config file"
            )
        from lama_cleaner.web_config import main

        logger.info("Launching installer web config page")
        main(args.installer_config)
        exit()

    if args.load_installer_config:
        if args.installer_config and not os.path.exists(args.installer_config):
            parser.error(f"args.installer_config={args.installer_config} not exists")

        logger.info(f"Loading installer config from {args.installer_config}")
        _args = load_config(args.installer_config)
        for k, v in vars(_args).items():
            if k in vars(args):
                setattr(args, k, v)

    if args.device == "cuda":
        import platform

        if platform.system() == "Darwin":
            logger.info("MacOS does not support cuda, use cpu instead")
            setattr(args, "device", "cpu")
        else:
            import torch

            if torch.cuda.is_available() is False:
                parser.error(
                    "torch.cuda.is_available() is False, please use --device cpu or check your pytorch installation"
                )

    os.environ["U2NET_HOME"] = DEFAULT_MODEL_DIR
    if args.model_dir and args.model_dir is not None:
        if os.path.isfile(args.model_dir):
            parser.error(f"invalid --model-dir: {args.model_dir} is a file")

        if not os.path.exists(args.model_dir):
            logger.info(f"Create model cache directory: {args.model_dir}")
            Path(args.model_dir).mkdir(exist_ok=True, parents=True)

        os.environ["XDG_CACHE_HOME"] = args.model_dir
        os.environ["U2NET_HOME"] = args.model_dir

    if args.sd_run_local or args.local_files_only:
        os.environ["TRANSFORMERS_OFFLINE"] = "1"
        os.environ["HF_HUB_OFFLINE"] = "1"

    if args.model not in AVAILABLE_MODELS:
        scanned_models = scan_models()
        if args.model not in [it.name for it in scanned_models]:
            parser.error(
                f"invalid --model: {args.model} not exists. Available models: {AVAILABLE_MODELS} or {[it.name for it in scanned_models]}"
            )

    if args.input and args.input is not None:
        if not os.path.exists(args.input):
            parser.error(f"invalid --input: {args.input} not exists")
        if os.path.isfile(args.input):
            if imghdr.what(args.input) is None:
                parser.error(f"invalid --input: {args.input} is not a valid image file")
        else:
            if args.output_dir is None:
                parser.error(
                    f"invalid --input: {args.input} is a directory, --output-dir is required"
                )

    if args.output_dir is not None:
        output_dir = Path(args.output_dir)
        if not output_dir.exists():
            logger.info(f"Creating output directory: {output_dir}")
            output_dir.mkdir(parents=True)
        else:
            if not output_dir.is_dir():
                parser.error(f"invalid --output-dir: {output_dir} is not a directory")

    return args
@@ -585,7 +585,7 @@ def start(
    port: int = Option(8080),
    model: str = Option(
        DEFAULT_MODEL,
        help=f"Available models: [{', '.join(AVAILABLE_MODELS)}]. "
        help=f"Available erase models: [{', '.join(AVAILABLE_MODELS)}]. "
        f"You can use download command to download other SD/SDXL normal/inpainting models on huggingface",
    ),
    model_dir: Path = Option(
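The start() signature above uses Option(...) defaults in the typer style; the hunk does not show the import, so the following is a hedged, self-contained sketch of that pattern rather than the project's actual CLI wiring (command name, defaults, and help text are illustrative):

import typer

app = typer.Typer()


@app.command()
def start(
    host: str = typer.Option("127.0.0.1"),
    port: int = typer.Option(8080),
    model: str = typer.Option(
        "lama",
        help="Available erase models: [lama, ldm, ...]. "
        "You can use download command to download other models",
    ),
):
    # Placeholder body; the real start() launches the lama_cleaner server
    print(f"Serving {model} on {host}:{port}")


if __name__ == "__main__":
    app()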
@@ -644,13 +644,12 @@ def start(
        os.environ["TRANSFORMERS_OFFLINE"] = "1"
        os.environ["HF_HUB_OFFLINE"] = "1"

    if model not in AVAILABLE_MODELS:
        scanned_models = scan_models()
        if model not in [it.name for it in scanned_models]:
            logger.error(
                f"invalid --model: {model} not exists. Available models: {AVAILABLE_MODELS} or {[it.name for it in scanned_models]}"
            )
            exit()
    scanned_models = scan_models()
    if model not in [it.name for it in scanned_models]:
        logger.error(
            f"invalid model: {model} not exists. Available models: {[it.name for it in scanned_models]}"
        )
        exit()

    global_config.image_quality = quality
    global_config.disable_model_switch = disable_model_switch