AI-Video/workflows/selfhost/image_flux.json

{
  "29": {
    "inputs": {
      "seed": 1067822190154760,
      "steps": 20,
      "cfg": 1,
      "sampler_name": "euler",
      "scheduler": "simple",
      "denoise": 1,
      "model": [
        "48",
        0
      ],
      "positive": [
        "35",
        0
      ],
      "negative": [
        "33",
        0
      ],
      "latent_image": [
        "43",
        0
      ]
    },
    "class_type": "KSampler",
    "_meta": {
      "title": "KSampler"
    }
  },
  "31": {
    "inputs": {
      "text": [
        "46",
        0
      ],
      "clip": [
        "47",
        0
      ]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Prompt)"
    }
  },
  "33": {
    "inputs": {
      "conditioning": [
        "31",
        0
      ]
    },
    "class_type": "ConditioningZeroOut",
    "_meta": {
      "title": "ConditioningZeroOut"
    }
  },
  "35": {
    "inputs": {
      "guidance": 3.5,
      "conditioning": [
        "31",
        0
      ]
    },
    "class_type": "FluxGuidance",
    "_meta": {
      "title": "FluxGuidance"
    }
  },
  "36": {
    "inputs": {
      "filename_prefix": "ComfyUI",
      "images": [
        "37",
        0
      ]
    },
    "class_type": "SaveImage",
    "_meta": {
      "title": "Save Image"
    }
  },
  "37": {
    "inputs": {
      "samples": [
        "29",
        0
      ],
      "vae": [
        "49",
        0
      ]
    },
    "class_type": "VAEDecode",
    "_meta": {
      "title": "VAE Decode"
    }
  },
  "41": {
    "inputs": {
      "value": 1024
    },
    "class_type": "easy int",
    "_meta": {
      "title": "$width.value"
    }
  },
  "42": {
    "inputs": {
      "value": 1024
    },
    "class_type": "easy int",
    "_meta": {
      "title": "$height.value"
    }
  },
  "43": {
    "inputs": {
      "width": [
        "41",
        0
      ],
      "height": [
        "42",
        0
      ],
      "batch_size": 1
    },
    "class_type": "EmptyLatentImage",
    "_meta": {
      "title": "Empty Latent Image"
    }
  },
  "46": {
    "inputs": {
      "value": "Minimalist black-and-white matchstick figure style illustration, clean lines, simple sketch style, a dog"
    },
    "class_type": "PrimitiveStringMultiline",
    "_meta": {
      "title": "$prompt.value!"
    }
  },
  "47": {
    "inputs": {
      "clip_name1": "clip_l.safetensors",
      "clip_name2": "t5xxl_fp8_e4m3fn.safetensors",
      "type": "flux",
      "device": "default"
    },
    "class_type": "DualCLIPLoader",
    "_meta": {
      "title": "DualCLIPLoader"
    }
  },
  "48": {
    "inputs": {
      "unet_name": "flux1-dev.safetensors",
      "weight_dtype": "default"
    },
    "class_type": "UNETLoader",
    "_meta": {
      "title": "Load Diffusion Model"
    }
  },
  "49": {
    "inputs": {
      "vae_name": "ae.safetensors"
    },
    "class_type": "VAELoader",
    "_meta": {
      "title": "Load VAE"
    }
  }
}
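
This graph is in ComfyUI's API (prompt) format: each key is a node id mapped to its inputs and class_type. The UNETLoader (flux1-dev) and DualCLIPLoader feed the KSampler, the positive prompt passes through FluxGuidance, the negative input is the zeroed-out conditioning, and the result goes through VAEDecode to SaveImage. The sketch below shows one way such a graph could be queued over ComfyUI's HTTP API; it assumes a local server at 127.0.0.1:8188 and that the "$prompt.value!", "$width.value", and "$height.value" titles mark nodes whose "value" inputs the caller is expected to override (that templating convention is an assumption, not documented in the file itself).

# Minimal sketch: load the workflow, override the templated values, and queue it.
import json
import urllib.request

with open("AI-Video/workflows/selfhost/image_flux.json", "r", encoding="utf-8") as f:
    workflow = json.load(f)

# Assumed override points: node 46 holds the prompt text, nodes 41/42 the width/height.
workflow["46"]["inputs"]["value"] = "a cat, minimalist stick-figure sketch"
workflow["41"]["inputs"]["value"] = 768
workflow["42"]["inputs"]["value"] = 768

# ComfyUI accepts an API-format graph under the "prompt" key on POST /prompt.
payload = json.dumps({"prompt": workflow}).encode("utf-8")
req = urllib.request.Request(
    "http://127.0.0.1:8188/prompt",
    data=payload,
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    print(resp.read().decode("utf-8"))  # response includes the queued prompt_id

The generated image is then written by the SaveImage node (node 36) into the server's output directory with the "ComfyUI" filename prefix.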