Change crop-size to crop-margin to add more context for crop inference

This commit is contained in:
Sanster
2022-03-24 09:08:49 +08:00
parent 1207b6e291
commit d3f1ea2474
4 changed files with 14 additions and 14 deletions

View File

@@ -102,7 +102,8 @@ def get_args_parser():
"crop each area from original image to do inference."
"Mainly for performance and memory reasons"
"Only for lama")
parser.add_argument("--crop-size", default="512,512")
parser.add_argument("--crop-margin", type=int, default=256,
help="Margin around bounding box of painted stroke when crop mode triggered")
parser.add_argument(
"--ldm-steps",
default=50,
@@ -122,10 +123,9 @@ def main():
device = torch.device(args.device)
crop_trigger_size = [int(it) for it in args.crop_trigger_size.split(",")]
crop_size = [int(it) for it in args.crop_size.split(",")]
if args.model == "lama":
model = LaMa(crop_trigger_size=crop_trigger_size, crop_size=crop_size, device=device)
model = LaMa(crop_trigger_size=crop_trigger_size, crop_margin=args.crop_margin, device=device)
elif args.model == "ldm":
model = LDM(device, steps=args.ldm_steps)
else: