{ "id": "908d0bfb-e192-4627-9b57-147496e6e2dd", "revision": 0, "last_node_id": 73, "last_link_id": 98, "nodes": [ { "id": 40, "type": "DualCLIPLoader", "pos": [ -320, 290 ], "size": [ 270, 130 ], "flags": {}, "order": 0, "mode": 0, "inputs": [], "outputs": [ { "name": "CLIP", "type": "CLIP", "links": [ 64 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.40", "Node name for S&R": "DualCLIPLoader", "models": [ { "name": "clip_l.safetensors", "url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors", "directory": "text_encoders" }, { "name": "t5xxl_fp16.safetensors", "url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors", "directory": "text_encoders" } ] }, "widgets_values": [ "clip_l.safetensors", "t5xxl_fp16.safetensors", "flux", "default" ] }, { "id": 39, "type": "VAELoader", "pos": [ -320, 470 ], "size": [ 270, 58 ], "flags": {}, "order": 1, "mode": 0, "inputs": [], "outputs": [ { "name": "VAE", "type": "VAE", "links": [ 58 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.40", "Node name for S&R": "VAELoader", "models": [ { "name": "ae.safetensors", "url": "https://huggingface.co/Comfy-Org/Lumina_Image_2.0_Repackaged/resolve/main/split_files/vae/ae.safetensors", "directory": "vae" } ] }, "widgets_values": [ "ae.safetensors" ] }, { "id": 42, "type": "ConditioningZeroOut", "pos": [ -10, 460 ], "size": [ 200, 30 ], "flags": { "collapsed": false }, "order": 14, "mode": 0, "inputs": [ { "name": "conditioning", "type": "CONDITIONING", "link": 66 } ], "outputs": [ { "name": "CONDITIONING", "type": "CONDITIONING", "links": [ 63 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.40", "Node name for S&R": "ConditioningZeroOut" }, "widgets_values": [] }, { "id": 31, "type": "KSampler", "pos": [ 10, 550 ], "size": [ 315, 262 ], "flags": {}, "order": 16, "mode": 0, "inputs": [ { "name": "model", "type": "MODEL", "link": 61 }, { "name": "positive", "type": "CONDITIONING", "link": 65 }, { "name": "negative", "type": "CONDITIONING", "link": 63 }, { "name": "latent_image", "type": "LATENT", "link": 51 } ], "outputs": [ { "name": "LATENT", "type": "LATENT", "slot_index": 0, "links": [ 52 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.40", "Node name for S&R": "KSampler" }, "widgets_values": [ 516682275155174, "randomize", 20, 1, "euler", "simple", 1 ] }, { "id": 43, "type": "MarkdownNote", "pos": [ -870, 110 ], "size": [ 520, 390 ], "flags": {}, "order": 2, "mode": 0, "inputs": [], "outputs": [], "title": "Model links", "properties": {}, "widgets_values": [ "## Model links\n\n**Diffusion Model**\n\n- [flux1-krea-dev_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/FLUX.1-Krea-dev_ComfyUI/resolve/main/split_files/diffusion_models/flux1-krea-dev_fp8_scaled.safetensors)\n\nIf you need the original weights, head to [black-forest-labs/FLUX.1-Krea-dev](https://huggingface.co/black-forest-labs/FLUX.1-Krea-dev/), accept the agreement in the repo, then click the link below to download the models:\n\n- [flux1-krea-dev.safetensors](https://huggingface.co/black-forest-labs/FLUX.1-Krea-dev/resolve/main/flux1-krea-dev.safetensors)\n\n**Text Encoder**\n\n- [clip_l.safetensors](https://huggingface.co/comfyanonymous/flux_text_encoders/blob/main/clip_l.safetensors)\n\n- [t5xxl_fp16.safetensors](https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors) or 
[t5xxl_fp8_e4m3fn_scaled.safetensors](https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp8_e4m3fn_scaled.safetensors)\n\n**VAE**\n\n- [ae.safetensors](https://huggingface.co/Comfy-Org/Lumina_Image_2.0_Repackaged/resolve/main/split_files/vae/ae.safetensors)\n\n\n```\nComfyUI/\n├── models/\n│   ├── diffusion_models/\n│   │   └── flux1-krea-dev_fp8_scaled.safetensors\n│   ├── text_encoders/\n│   │   ├── clip_l.safetensors\n│   │   └── t5xxl_fp16.safetensors # or t5xxl_fp8_e4m3fn_scaled.safetensors\n│   └── vae/\n│       └── ae.safetensors\n```\n" ], "color": "#432", "bgcolor": "#653" }, { "id": 52, "type": "CFGNorm", "pos": [ 2230, 230 ], "size": [ 290, 60 ], "flags": {}, "order": 19, "mode": 0, "inputs": [ { "name": "model", "type": "MODEL", "link": 71 } ], "outputs": [ { "name": "patched_model", "type": "MODEL", "links": [ 73 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.50", "Node name for S&R": "CFGNorm", "enableTabs": false, "tabWidth": 65, "tabXOffset": 10, "hasSecondTab": false, "secondTabText": "Send Back", "secondTabOffset": 80, "secondTabWidth": 65, "ue_properties": { "widget_ue_connectable": { "strength": true } } }, "widgets_values": [ 1 ] }, { "id": 55, "type": "ModelSamplingAuraFlow", "pos": [ 2230, 120 ], "size": [ 290, 60 ], "flags": {}, "order": 17, "mode": 0, "inputs": [ { "name": "model", "type": "MODEL", "link": 95 } ], "outputs": [ { "name": "MODEL", "type": "MODEL", "links": [ 71 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "ModelSamplingAuraFlow", "enableTabs": false, "tabWidth": 65, "tabXOffset": 10, "hasSecondTab": false, "secondTabText": "Send Back", "secondTabOffset": 80, "secondTabWidth": 65, "widget_ue_connectable": {} }, "widgets_values": [ 3 ] }, { "id": 56, "type": "KSampler", "pos": [ 2230, 340 ], "size": [ 300, 474 ], "flags": {}, "order": 25, "mode": 0, "inputs": [ { "name": "model", "type": "MODEL", "link": 73 }, { "name": "positive", "type": "CONDITIONING", "link": 74 }, { "name": "negative", "type": "CONDITIONING", "link": 75 }, { "name": "latent_image", "type": "LATENT", "link": 76 } ], "outputs": [ { "name": "LATENT", "type": "LATENT", "slot_index": 0, "links": [ 91 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "KSampler", "enableTabs": false, "tabWidth": 65, "tabXOffset": 10, "hasSecondTab": false, "secondTabText": "Send Back", "secondTabOffset": 80, "secondTabWidth": 65, "widget_ue_connectable": {} }, "widgets_values": [ 84333832884411, "randomize", 4, 1, "euler", "simple", 1 ] }, { "id": 59, "type": "EmptySD3LatentImage", "pos": [ 2240, 1110 ], "size": [ 270, 106 ], "flags": {}, "order": 3, "mode": 0, "inputs": [], "outputs": [ { "name": "LATENT", "type": "LATENT", "links": [] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.59", "Node name for S&R": "EmptySD3LatentImage" }, "widgets_values": [ 1024, 1024, 1 ] }, { "id": 60, "type": "VAEEncode", "pos": [ 1980, 1150 ], "size": [ 140, 46 ], "flags": {}, "order": 22, "mode": 0, "inputs": [ { "name": "pixels", "type": "IMAGE", "link": 78 }, { "name": "vae", "type": "VAE", "link": 79 } ], "outputs": [ { "name": "LATENT", "type": "LATENT", "links": [ 76 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.50", "Node name for S&R": "VAEEncode", "enableTabs": false, "tabWidth": 65, "tabXOffset": 10, "hasSecondTab": false, "secondTabText": "Send Back", "secondTabOffset": 80, "secondTabWidth": 65, "ue_properties": { "widget_ue_connectable": {} } }, "widgets_values": [] }, { "id": 61, "type": 
"TextEncodeQwenImageEditPlus", "pos": [ 1710, 430 ], "size": [ 400, 200 ], "flags": {}, "order": 23, "mode": 0, "inputs": [ { "name": "clip", "type": "CLIP", "link": 80 }, { "name": "vae", "shape": 7, "type": "VAE", "link": 81 }, { "name": "image1", "shape": 7, "type": "IMAGE", "link": 82 }, { "name": "image2", "shape": 7, "type": "IMAGE", "link": null }, { "name": "image3", "shape": 7, "type": "IMAGE", "link": null } ], "outputs": [ { "name": "CONDITIONING", "type": "CONDITIONING", "links": [ 75 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.59", "Node name for S&R": "TextEncodeQwenImageEditPlus" }, "widgets_values": [ "" ], "color": "#223", "bgcolor": "#335" }, { "id": 62, "type": "MarkdownNote", "pos": [ 2220, 1280 ], "size": [ 330, 90 ], "flags": {}, "order": 4, "mode": 0, "inputs": [], "outputs": [], "title": "Note: About image size", "properties": {}, "widgets_values": [ "You can use the latent from the **EmptySD3LatentImage** to replace **VAE Encode**, so you can customize the image size." ], "color": "#432", "bgcolor": "#653" }, { "id": 63, "type": "MarkdownNote", "pos": [ 2230, 860 ], "size": [ 300, 160 ], "flags": {}, "order": 5, "mode": 0, "inputs": [], "outputs": [], "title": "Note: KSampler settings", "properties": {}, "widgets_values": [ "You can test and find the best setting by yourself. The following table is for reference.\n\n| Model | Steps | CFG |\n|---------------------|---------------|---------------|\n| Offical | 50 | 4.0 \n| fp8_e4m3fn | 20 | 2.5 |\n| fp8_e4m3fn + 4steps LoRA | 4 | 1.0 |\n" ], "color": "#432", "bgcolor": "#653" }, { "id": 69, "type": "VAEDecode", "pos": [ 2570, 120 ], "size": [ 210, 46 ], "flags": { "collapsed": false }, "order": 26, "mode": 0, "inputs": [ { "name": "samples", "type": "LATENT", "link": 91 }, { "name": "vae", "type": "VAE", "link": 92 } ], "outputs": [ { "name": "IMAGE", "type": "IMAGE", "slot_index": 0, "links": [ 77 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "VAEDecode", "enableTabs": false, "tabWidth": 65, "tabXOffset": 10, "hasSecondTab": false, "secondTabText": "Send Back", "secondTabOffset": 80, "secondTabWidth": 65, "widget_ue_connectable": {} }, "widgets_values": [] }, { "id": 72, "type": "MarkdownNote", "pos": [ 1250, 1280 ], "size": [ 290, 140 ], "flags": {}, "order": 6, "mode": 0, "inputs": [], "outputs": [], "properties": {}, "widgets_values": [ "This node is to avoid bad output results caused by excessively large input image sizes. Because when we input one image, we use the size of that input image.\n\nThe **TextEncodeQwenImageEditPlus** will scale your input to 1024×104 pixels. We use the size of your first input image. This node is to avoid having an input image size that is too large (such as 3000×3000 pixels), which could bring bad results." 
], "color": "#432", "bgcolor": "#653" }, { "id": 38, "type": "UNETLoader", "pos": [ -320, 150 ], "size": [ 270, 82 ], "flags": {}, "order": 7, "mode": 0, "inputs": [], "outputs": [ { "name": "MODEL", "type": "MODEL", "links": [ 61 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.40", "Node name for S&R": "UNETLoader", "models": [ { "name": "flux1-krea-dev_fp8_scaled.safetensors", "url": "https://huggingface.co/Comfy-Org/FLUX.1-Krea-dev_ComfyUI/resolve/main/split_files/diffusion_models/flux1-krea-dev_fp8_scaled.safetensors", "directory": "diffusion_models" } ] }, "widgets_values": [ "flux1-krea-dev_fp8_scaled.safetensors", "default" ] }, { "id": 53, "type": "VAELoader", "pos": [ 1251.3350830078125, 769.044189453125 ], "size": [ 330, 60 ], "flags": {}, "order": 8, "mode": 0, "inputs": [], "outputs": [ { "name": "VAE", "type": "VAE", "slot_index": 0, "links": [ 79, 81, 87, 92 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "VAELoader", "models": [ { "name": "qwen_image_vae.safetensors", "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors", "directory": "vae" } ], "enableTabs": false, "tabWidth": 65, "tabXOffset": 10, "hasSecondTab": false, "secondTabText": "Send Back", "secondTabOffset": 80, "secondTabWidth": 65, "widget_ue_connectable": {} }, "widgets_values": [ "qwen_image_vae.safetensors" ] }, { "id": 54, "type": "CLIPLoader", "pos": [ 1246.7269287109375, 558.9865112304688 ], "size": [ 330, 110 ], "flags": {}, "order": 9, "mode": 0, "inputs": [], "outputs": [ { "name": "CLIP", "type": "CLIP", "slot_index": 0, "links": [ 80, 86 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "CLIPLoader", "models": [ { "name": "qwen_2.5_vl_7b_fp8_scaled.safetensors", "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors", "directory": "text_encoders" } ], "enableTabs": false, "tabWidth": 65, "tabXOffset": 10, "hasSecondTab": false, "secondTabText": "Send Back", "secondTabOffset": 80, "secondTabWidth": 65, "widget_ue_connectable": {} }, "widgets_values": [ "qwen_2.5_vl_7b_fp8_scaled.safetensors", "qwen_image", "default" ] }, { "id": 73, "type": "LoraLoaderModelOnly", "pos": [ 1227.6541748046875, 406.69415283203125 ], "size": [ 270, 82 ], "flags": {}, "order": 15, "mode": 0, "inputs": [ { "name": "model", "type": "MODEL", "link": 94 } ], "outputs": [ { "name": "MODEL", "type": "MODEL", "links": [ 95 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.64", "Node name for S&R": "LoraLoaderModelOnly" }, "widgets_values": [ "next-scene_lora_v1-3000.safetensors", 1 ] }, { "id": 65, "type": "LoraLoaderModelOnly", "pos": [ 1222.294677734375, 231.0044403076172 ], "size": [ 310, 82 ], "flags": {}, "order": 13, "mode": 0, "inputs": [ { "name": "model", "type": "MODEL", "link": 85 } ], "outputs": [ { "name": "MODEL", "type": "MODEL", "links": [ 94 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.50", "Node name for S&R": "LoraLoaderModelOnly", "models": [ { "name": "Qwen-Image-Edit-2509-Lightning-4steps-V1.0-bf16.safetensors", "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-2509/Qwen-Image-Edit-2509-Lightning-4steps-V1.0-bf16.safetensors", "directory": "loras" } ], "enableTabs": false, "tabWidth": 65, "tabXOffset": 10, "hasSecondTab": false, "secondTabText": "Send Back", "secondTabOffset": 80, "secondTabWidth": 65, "ue_properties": { 
"widget_ue_connectable": { "lora_name": true, "strength_model": true } } }, "widgets_values": [ "Qwen-Image-Edit-2509-Lightning-4steps-V1.0-bf16.safetensors", 1 ] }, { "id": 57, "type": "UNETLoader", "pos": [ 1262.0018310546875, 82.82173919677734 ], "size": [ 330, 90 ], "flags": {}, "order": 10, "mode": 0, "inputs": [], "outputs": [ { "name": "MODEL", "type": "MODEL", "slot_index": 0, "links": [ 85 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "UNETLoader", "models": [ { "name": "qwen_image_edit_2509_fp8_e4m3fn.safetensors", "url": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_edit_2509_fp8_e4m3fn.safetensors", "directory": "diffusion_models" } ], "enableTabs": false, "tabWidth": 65, "tabXOffset": 10, "hasSecondTab": false, "secondTabText": "Send Back", "secondTabOffset": 80, "secondTabWidth": 65, "widget_ue_connectable": {} }, "widgets_values": [ "qwen_image_edit_2509_fp8_e4m3fn.safetensors", "default" ] }, { "id": 9, "type": "SaveImage", "pos": [ -68.49736022949219, 949.423583984375 ], "size": [ 640, 660 ], "flags": {}, "order": 20, "mode": 0, "inputs": [ { "name": "images", "type": "IMAGE", "link": 9 } ], "outputs": [], "properties": { "cnr_id": "comfy-core", "ver": "0.3.40", "Node name for S&R": "SaveImage" }, "widgets_values": [ "flux_krea/flux_krea" ] }, { "id": 58, "type": "SaveImage", "pos": [ 592.7343139648438, 952.6732788085938 ], "size": [ 507.185302734375, 476.2288513183594 ], "flags": {}, "order": 27, "mode": 0, "inputs": [ { "name": "images", "type": "IMAGE", "link": 77 } ], "outputs": [], "properties": { "cnr_id": "comfy-core", "ver": "0.3.48", "Node name for S&R": "SaveImage", "enableTabs": false, "tabWidth": 65, "tabXOffset": 10, "hasSecondTab": false, "secondTabText": "Send Back", "secondTabOffset": 80, "secondTabWidth": 65, "widget_ue_connectable": {} }, "widgets_values": [ "ComfyUI" ] }, { "id": 45, "type": "CLIPTextEncode", "pos": [ -426.0304260253906, 959.8533325195312 ], "size": [ 330, 210 ], "flags": {}, "order": 12, "mode": 0, "inputs": [ { "name": "clip", "type": "CLIP", "link": 64 } ], "outputs": [ { "name": "CONDITIONING", "type": "CONDITIONING", "links": [ 65, 66 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.47", "Node name for S&R": "CLIPTextEncode" }, "widgets_values": [ "A realistic cinematic shot of an old lighthouse keeper standing at the edge of a cliff during a violent storm, his long coat whipping in the wind, waves crashing below, lightning flashing across the sky. dramatic lighting, cinematic realism, moody atmosphere." ] }, { "id": 68, "type": "TextEncodeQwenImageEditPlus", "pos": [ 1703.297607421875, 179.4862518310547 ], "size": [ 400, 200 ], "flags": {}, "order": 24, "mode": 0, "inputs": [ { "name": "clip", "type": "CLIP", "link": 86 }, { "name": "vae", "shape": 7, "type": "VAE", "link": 87 }, { "name": "image1", "shape": 7, "type": "IMAGE", "link": 98 }, { "name": "image2", "shape": 7, "type": "IMAGE", "link": null }, { "name": "image3", "shape": 7, "type": "IMAGE", "link": null } ], "outputs": [ { "name": "CONDITIONING", "type": "CONDITIONING", "links": [ 74 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.59", "Node name for S&R": "TextEncodeQwenImageEditPlus" }, "widgets_values": [ "Next Scene: The camera pushes in from behind the keeper, showing him gripping the rail as the storm rages and lightning illuminates his weathered face. 
realistic cinematic style" ], "color": "#232", "bgcolor": "#353" }, { "id": 27, "type": "EmptySD3LatentImage", "pos": [ -320, 630 ], "size": [ 270, 120 ], "flags": {}, "order": 11, "mode": 0, "inputs": [], "outputs": [ { "name": "LATENT", "type": "LATENT", "slot_index": 0, "links": [ 51 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.40", "Node name for S&R": "EmptySD3LatentImage" }, "widgets_values": [ 1280, 720, 1 ] }, { "id": 8, "type": "VAEDecode", "pos": [ 230, 470 ], "size": [ 210, 46 ], "flags": { "collapsed": false }, "order": 18, "mode": 0, "inputs": [ { "name": "samples", "type": "LATENT", "link": 52 }, { "name": "vae", "type": "VAE", "link": 58 } ], "outputs": [ { "name": "IMAGE", "type": "IMAGE", "slot_index": 0, "links": [ 9, 97 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.40", "Node name for S&R": "VAEDecode" }, "widgets_values": [] }, { "id": 71, "type": "ImageScaleToTotalPixels", "pos": [ 1408.750732421875, 1016.1314697265625 ], "size": [ 270, 82 ], "flags": {}, "order": 21, "mode": 0, "inputs": [ { "name": "image", "type": "IMAGE", "link": 97 } ], "outputs": [ { "name": "IMAGE", "type": "IMAGE", "links": [ 78, 82, 98 ] } ], "properties": { "cnr_id": "comfy-core", "ver": "0.3.50", "Node name for S&R": "ImageScaleToTotalPixels", "enableTabs": false, "tabWidth": 65, "tabXOffset": 10, "hasSecondTab": false, "secondTabText": "Send Back", "secondTabOffset": 80, "secondTabWidth": 65, "ue_properties": { "widget_ue_connectable": { "upscale_method": true, "megapixels": true } } }, "widgets_values": [ "lanczos", 1 ] } ], "links": [ [ 9, 8, 0, 9, 0, "IMAGE" ], [ 51, 27, 0, 31, 3, "LATENT" ], [ 52, 31, 0, 8, 0, "LATENT" ], [ 58, 39, 0, 8, 1, "VAE" ], [ 61, 38, 0, 31, 0, "MODEL" ], [ 63, 42, 0, 31, 2, "CONDITIONING" ], [ 64, 40, 0, 45, 0, "CLIP" ], [ 65, 45, 0, 31, 1, "CONDITIONING" ], [ 66, 45, 0, 42, 0, "CONDITIONING" ], [ 71, 55, 0, 52, 0, "MODEL" ], [ 73, 52, 0, 56, 0, "MODEL" ], [ 74, 68, 0, 56, 1, "CONDITIONING" ], [ 75, 61, 0, 56, 2, "CONDITIONING" ], [ 76, 60, 0, 56, 3, "LATENT" ], [ 77, 69, 0, 58, 0, "IMAGE" ], [ 78, 71, 0, 60, 0, "IMAGE" ], [ 79, 53, 0, 60, 1, "VAE" ], [ 80, 54, 0, 61, 0, "CLIP" ], [ 81, 53, 0, 61, 1, "VAE" ], [ 82, 71, 0, 61, 2, "IMAGE" ], [ 85, 57, 0, 65, 0, "MODEL" ], [ 86, 54, 0, 68, 0, "CLIP" ], [ 87, 53, 0, 68, 1, "VAE" ], [ 91, 56, 0, 69, 0, "LATENT" ], [ 92, 53, 0, 69, 1, "VAE" ], [ 94, 65, 0, 73, 0, "MODEL" ], [ 95, 73, 0, 55, 0, "MODEL" ], [ 97, 8, 0, 71, 0, "IMAGE" ], [ 98, 71, 0, 68, 2, "IMAGE" ] ], "groups": [ { "id": 1, "title": "Step 1 - Load Models Here", "bounding": [ -330, 80, 300, 460 ], "color": "#3f789e", "font_size": 24, "flags": {} }, { "id": 2, "title": "Step 2 - Image Size", "bounding": [ -330, 560, 300, 200 ], "color": "#3f789e", "font_size": 24, "flags": {} }, { "id": 3, "title": "Step 3 - Prompt", "bounding": [ -10, 80, 360, 333.6000061035156 ], "color": "#3f789e", "font_size": 24, "flags": {} }, { "id": 4, "title": "Step1 - Load models", "bounding": [ 1220, 80, 370, 570 ], "color": "#3f789e", "font_size": 24, "flags": {} }, { "id": 5, "title": "Step 2 - Upload image for editing", "bounding": [ 1220, 680, 970, 550 ], "color": "#3f789e", "font_size": 24, "flags": {} }, { "id": 6, "title": "Step 4 - Prompt", "bounding": [ 1620, 80, 570, 570 ], "color": "#3f789e", "font_size": 24, "flags": {} }, { "id": 7, "title": "Step3 - Image Size", "bounding": [ 2220, 1030, 310, 200 ], "color": "#3f789e", "font_size": 24, "flags": {} } ], "config": {}, "extra": { "ds": { "scale": 0.7187434554973852, "offset": [ 
1205.8930184858773, -570.9619526574623 ] }, "frontendVersion": "1.27.10", "VHS_latentpreview": false, "VHS_latentpreviewrate": 0, "VHS_MetadataImage": true, "VHS_KeepIntermediate": true }, "version": 0.4 }