svjack commited on
Commit
0a71b43
·
verified ·
1 Parent(s): 660693c

Upload qwen_image_union_control_anime_style.json

Browse files
qwen_image_union_control_anime_style.json ADDED
@@ -0,0 +1,1207 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "id": "00000000-0000-0000-0000-000000000000",
3
+ "revision": 0,
4
+ "last_node_id": 84,
5
+ "last_link_id": 50,
6
+ "nodes": [
7
+ {
8
+ "id": 7,
9
+ "type": "CLIPTextEncode",
10
+ "pos": [
11
+ 420,
12
+ 710
13
+ ],
14
+ "size": [
15
+ 400,
16
+ 150
17
+ ],
18
+ "flags": {},
19
+ "order": 9,
20
+ "mode": 0,
21
+ "inputs": [
22
+ {
23
+ "name": "clip",
24
+ "type": "CLIP",
25
+ "link": 25
26
+ }
27
+ ],
28
+ "outputs": [
29
+ {
30
+ "name": "CONDITIONING",
31
+ "type": "CONDITIONING",
32
+ "links": [
33
+ 33
34
+ ]
35
+ }
36
+ ],
37
+ "title": "CLIP Text Encode (Negative Prompt)",
38
+ "properties": {
39
+ "cnr_id": "comfy-core",
40
+ "ver": "0.3.51",
41
+ "Node name for S&R": "CLIPTextEncode"
42
+ },
43
+ "widgets_values": [
44
+ " "
45
+ ],
46
+ "color": "#223",
47
+ "bgcolor": "#335"
48
+ },
49
+ {
50
+ "id": 74,
51
+ "type": "Canny",
52
+ "pos": [
53
+ 440,
54
+ 1000
55
+ ],
56
+ "size": [
57
+ 350,
58
+ 82
59
+ ],
60
+ "flags": {},
61
+ "order": 12,
62
+ "mode": 0,
63
+ "inputs": [
64
+ {
65
+ "name": "image",
66
+ "type": "IMAGE",
67
+ "link": 42
68
+ }
69
+ ],
70
+ "outputs": [
71
+ {
72
+ "name": "IMAGE",
73
+ "type": "IMAGE",
74
+ "links": [
75
+ 35,
76
+ 38
77
+ ]
78
+ }
79
+ ],
80
+ "properties": {
81
+ "cnr_id": "comfy-core",
82
+ "ver": "0.3.51",
83
+ "Node name for S&R": "Canny"
84
+ },
85
+ "widgets_values": [
86
+ 0.4,
87
+ 0.8
88
+ ]
89
+ },
90
+ {
91
+ "id": 75,
92
+ "type": "PreviewImage",
93
+ "pos": [
94
+ 450,
95
+ 1140
96
+ ],
97
+ "size": [
98
+ 330,
99
+ 290
100
+ ],
101
+ "flags": {},
102
+ "order": 15,
103
+ "mode": 0,
104
+ "inputs": [
105
+ {
106
+ "name": "images",
107
+ "type": "IMAGE",
108
+ "link": 38
109
+ }
110
+ ],
111
+ "outputs": [],
112
+ "properties": {
113
+ "cnr_id": "comfy-core",
114
+ "ver": "0.3.51",
115
+ "Node name for S&R": "PreviewImage"
116
+ },
117
+ "widgets_values": []
118
+ },
119
+ {
120
+ "id": 70,
121
+ "type": "ReferenceLatent",
122
+ "pos": [
123
+ 860,
124
+ 470
125
+ ],
126
+ "size": [
127
+ 197.712890625,
128
+ 46
129
+ ],
130
+ "flags": {},
131
+ "order": 17,
132
+ "mode": 0,
133
+ "inputs": [
134
+ {
135
+ "name": "conditioning",
136
+ "type": "CONDITIONING",
137
+ "link": 31
138
+ },
139
+ {
140
+ "name": "latent",
141
+ "shape": 7,
142
+ "type": "LATENT",
143
+ "link": 32
144
+ }
145
+ ],
146
+ "outputs": [
147
+ {
148
+ "name": "CONDITIONING",
149
+ "type": "CONDITIONING",
150
+ "links": [
151
+ 21
152
+ ]
153
+ }
154
+ ],
155
+ "properties": {
156
+ "cnr_id": "comfy-core",
157
+ "ver": "0.3.51",
158
+ "Node name for S&R": "ReferenceLatent"
159
+ },
160
+ "widgets_values": []
161
+ },
162
+ {
163
+ "id": 71,
164
+ "type": "ReferenceLatent",
165
+ "pos": [
166
+ 850,
167
+ 720
168
+ ],
169
+ "size": [
170
+ 197.712890625,
171
+ 46
172
+ ],
173
+ "flags": {},
174
+ "order": 18,
175
+ "mode": 0,
176
+ "inputs": [
177
+ {
178
+ "name": "conditioning",
179
+ "type": "CONDITIONING",
180
+ "link": 33
181
+ },
182
+ {
183
+ "name": "latent",
184
+ "shape": 7,
185
+ "type": "LATENT",
186
+ "link": 34
187
+ }
188
+ ],
189
+ "outputs": [
190
+ {
191
+ "name": "CONDITIONING",
192
+ "type": "CONDITIONING",
193
+ "links": [
194
+ 22
195
+ ]
196
+ }
197
+ ],
198
+ "properties": {
199
+ "cnr_id": "comfy-core",
200
+ "ver": "0.3.51",
201
+ "Node name for S&R": "ReferenceLatent"
202
+ },
203
+ "widgets_values": []
204
+ },
205
+ {
206
+ "id": 72,
207
+ "type": "VAEEncode",
208
+ "pos": [
209
+ 900,
210
+ 950
211
+ ],
212
+ "size": [
213
+ 140,
214
+ 46
215
+ ],
216
+ "flags": {},
217
+ "order": 14,
218
+ "mode": 0,
219
+ "inputs": [
220
+ {
221
+ "name": "pixels",
222
+ "type": "IMAGE",
223
+ "link": 35
224
+ },
225
+ {
226
+ "name": "vae",
227
+ "type": "VAE",
228
+ "link": 36
229
+ }
230
+ ],
231
+ "outputs": [
232
+ {
233
+ "name": "LATENT",
234
+ "type": "LATENT",
235
+ "links": [
236
+ 32,
237
+ 34,
238
+ 44
239
+ ]
240
+ }
241
+ ],
242
+ "properties": {
243
+ "cnr_id": "comfy-core",
244
+ "ver": "0.3.51",
245
+ "Node name for S&R": "VAEEncode"
246
+ },
247
+ "widgets_values": []
248
+ },
249
+ {
250
+ "id": 77,
251
+ "type": "ImageScaleToTotalPixels",
252
+ "pos": [
253
+ 60,
254
+ 1220
255
+ ],
256
+ "size": [
257
+ 270,
258
+ 82
259
+ ],
260
+ "flags": {},
261
+ "order": 10,
262
+ "mode": 0,
263
+ "inputs": [
264
+ {
265
+ "name": "image",
266
+ "type": "IMAGE",
267
+ "link": 41
268
+ }
269
+ ],
270
+ "outputs": [
271
+ {
272
+ "name": "IMAGE",
273
+ "type": "IMAGE",
274
+ "links": [
275
+ 42
276
+ ]
277
+ }
278
+ ],
279
+ "properties": {
280
+ "cnr_id": "comfy-core",
281
+ "ver": "0.3.51",
282
+ "Node name for S&R": "ImageScaleToTotalPixels"
283
+ },
284
+ "widgets_values": [
285
+ "lanczos",
286
+ 1
287
+ ]
288
+ },
289
+ {
290
+ "id": 82,
291
+ "type": "MarkdownNote",
292
+ "pos": [
293
+ 60,
294
+ 1350
295
+ ],
296
+ "size": [
297
+ 270,
298
+ 120
299
+ ],
300
+ "flags": {},
301
+ "order": 0,
302
+ "mode": 0,
303
+ "inputs": [],
304
+ "outputs": [],
305
+ "title": "About Scale Image to Total Pixels",
306
+ "properties": {},
307
+ "widgets_values": [
308
+ "This node is to avoid poor output results caused by excessively large input image sizes. You can remove it or use **ctrl + B** to bypass it if you don't need it."
309
+ ],
310
+ "color": "#432",
311
+ "bgcolor": "#653"
312
+ },
313
+ {
314
+ "id": 81,
315
+ "type": "MarkdownNote",
316
+ "pos": [
317
+ 1100,
318
+ 780
319
+ ],
320
+ "size": [
321
+ 260,
322
+ 150
323
+ ],
324
+ "flags": {},
325
+ "order": 1,
326
+ "mode": 0,
327
+ "inputs": [],
328
+ "outputs": [],
329
+ "title": "KSampler settings",
330
+ "properties": {},
331
+ "widgets_values": [
332
+ "You can test and find the best setting by yourself. The following table is for reference.\n\n| model | steps | cfg |\n|---------------------|---------------|---------------|\n| fp8_e4m3fn | 20 | 2.5 |\n| fp8_e4m3fn + 4 steps LoRA | 4 | 1.0 |\n"
333
+ ],
334
+ "color": "#432",
335
+ "bgcolor": "#653"
336
+ },
337
+ {
338
+ "id": 66,
339
+ "type": "ModelSamplingAuraFlow",
340
+ "pos": [
341
+ 1100,
342
+ 170
343
+ ],
344
+ "size": [
345
+ 260,
346
+ 58
347
+ ],
348
+ "flags": {},
349
+ "order": 19,
350
+ "mode": 0,
351
+ "inputs": [
352
+ {
353
+ "name": "model",
354
+ "type": "MODEL",
355
+ "link": 48
356
+ }
357
+ ],
358
+ "outputs": [
359
+ {
360
+ "name": "MODEL",
361
+ "type": "MODEL",
362
+ "links": [
363
+ 20
364
+ ]
365
+ }
366
+ ],
367
+ "properties": {
368
+ "cnr_id": "comfy-core",
369
+ "ver": "0.3.51",
370
+ "Node name for S&R": "ModelSamplingAuraFlow"
371
+ },
372
+ "widgets_values": [
373
+ 3.1
374
+ ]
375
+ },
376
+ {
377
+ "id": 8,
378
+ "type": "VAEDecode",
379
+ "pos": [
380
+ 1400,
381
+ 170
382
+ ],
383
+ "size": [
384
+ 140,
385
+ 46
386
+ ],
387
+ "flags": {},
388
+ "order": 21,
389
+ "mode": 0,
390
+ "inputs": [
391
+ {
392
+ "name": "samples",
393
+ "type": "LATENT",
394
+ "link": 26
395
+ },
396
+ {
397
+ "name": "vae",
398
+ "type": "VAE",
399
+ "link": 27
400
+ }
401
+ ],
402
+ "outputs": [
403
+ {
404
+ "name": "IMAGE",
405
+ "type": "IMAGE",
406
+ "links": [
407
+ 28
408
+ ]
409
+ }
410
+ ],
411
+ "properties": {
412
+ "cnr_id": "comfy-core",
413
+ "ver": "0.3.51",
414
+ "Node name for S&R": "VAEDecode"
415
+ },
416
+ "widgets_values": []
417
+ },
418
+ {
419
+ "id": 80,
420
+ "type": "MarkdownNote",
421
+ "pos": [
422
+ -560,
423
+ 160
424
+ ],
425
+ "size": [
426
+ 540,
427
+ 630
428
+ ],
429
+ "flags": {},
430
+ "order": 2,
431
+ "mode": 0,
432
+ "inputs": [],
433
+ "outputs": [],
434
+ "title": "Model links",
435
+ "properties": {
436
+ "widget_ue_connectable": {}
437
+ },
438
+ "widgets_values": [
439
+ "[Tutorial](https://docs.comfy.org/tutorials/image/qwen/qwen-image) | [教程](https://docs.comfy.org/zh-CN/tutorials/image/qwen/qwen-image)\n\n\n## Model links\n\nYou can find all the models on [Huggingface](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/tree/main) or [Modelscope](https://modelscope.cn/models/Comfy-Org/Qwen-Image_ComfyUI/files)\n\n**Diffusion model**\n\n- [qwen_image_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors)\n\n**LoRA**\n\n- [Qwen-Image-Lightning-8steps-V1.0.safetensors](https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.0.safetensors)\n- [qwen_image_union_diffsynth_lora.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/loras/qwen_image_union_diffsynth_lora.safetensors)\n\n**Text encoder**\n\n- [qwen_2.5_vl_7b_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors)\n\n**VAE**\n\n- [qwen_image_vae.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors)\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 diffusion_models/\n│ │ ├── qwen_image_fp8_e4m3fn.safetensors\n│ │ └── qwen_image_distill_full_fp8_e4m3fn.safetensors\n│ ├── 📂 loras/\n│ │ ├── qwen_image_union_diffsynth_lora.safetensors\n│ │ └── Qwen-Image-Lightning-8steps-V1.0.safetensors\n│ ├── 📂 vae/\n│ │ └── qwen_image_vae.safetensors\n│ └── 📂 text_encoders/\n│ └── qwen_2.5_vl_7b_fp8_scaled.safetensors\n```\n"
440
+ ],
441
+ "color": "#432",
442
+ "bgcolor": "#653"
443
+ },
444
+ {
445
+ "id": 69,
446
+ "type": "LoraLoaderModelOnly",
447
+ "pos": [
448
+ 30,
449
+ 360
450
+ ],
451
+ "size": [
452
+ 330,
453
+ 82
454
+ ],
455
+ "flags": {},
456
+ "order": 7,
457
+ "mode": 0,
458
+ "inputs": [
459
+ {
460
+ "name": "model",
461
+ "type": "MODEL",
462
+ "link": 30
463
+ }
464
+ ],
465
+ "outputs": [
466
+ {
467
+ "name": "MODEL",
468
+ "type": "MODEL",
469
+ "links": [
470
+ 45
471
+ ]
472
+ }
473
+ ],
474
+ "properties": {
475
+ "cnr_id": "comfy-core",
476
+ "ver": "0.3.51",
477
+ "Node name for S&R": "LoraLoaderModelOnly",
478
+ "models": [
479
+ {
480
+ "name": "qwen_image_union_diffsynth_lora.safetensors",
481
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/loras/qwen_image_union_diffsynth_lora.safetensors",
482
+ "directory": "loras"
483
+ }
484
+ ]
485
+ },
486
+ "widgets_values": [
487
+ "qwen_image_union_diffsynth_lora.safetensors",
488
+ 1
489
+ ]
490
+ },
491
+ {
492
+ "id": 37,
493
+ "type": "UNETLoader",
494
+ "pos": [
495
+ 30,
496
+ 220
497
+ ],
498
+ "size": [
499
+ 330,
500
+ 82
501
+ ],
502
+ "flags": {},
503
+ "order": 3,
504
+ "mode": 0,
505
+ "inputs": [],
506
+ "outputs": [
507
+ {
508
+ "name": "MODEL",
509
+ "type": "MODEL",
510
+ "links": [
511
+ 30
512
+ ]
513
+ }
514
+ ],
515
+ "properties": {
516
+ "cnr_id": "comfy-core",
517
+ "ver": "0.3.51",
518
+ "Node name for S&R": "UNETLoader",
519
+ "models": [
520
+ {
521
+ "name": "qwen_image_fp8_e4m3fn.safetensors",
522
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors",
523
+ "directory": "diffusion_models"
524
+ }
525
+ ]
526
+ },
527
+ "widgets_values": [
528
+ "qwen_image_fp8_e4m3fn.safetensors",
529
+ "default"
530
+ ]
531
+ },
532
+ {
533
+ "id": 38,
534
+ "type": "CLIPLoader",
535
+ "pos": [
536
+ 30,
537
+ 490
538
+ ],
539
+ "size": [
540
+ 330,
541
+ 110
542
+ ],
543
+ "flags": {},
544
+ "order": 4,
545
+ "mode": 0,
546
+ "inputs": [],
547
+ "outputs": [
548
+ {
549
+ "name": "CLIP",
550
+ "type": "CLIP",
551
+ "links": [
552
+ 24,
553
+ 25
554
+ ]
555
+ }
556
+ ],
557
+ "properties": {
558
+ "cnr_id": "comfy-core",
559
+ "ver": "0.3.51",
560
+ "Node name for S&R": "CLIPLoader",
561
+ "models": [
562
+ {
563
+ "name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
564
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors",
565
+ "directory": "text_encoders"
566
+ }
567
+ ]
568
+ },
569
+ "widgets_values": [
570
+ "qwen_2.5_vl_7b_fp8_scaled.safetensors",
571
+ "qwen_image",
572
+ "default"
573
+ ]
574
+ },
575
+ {
576
+ "id": 39,
577
+ "type": "VAELoader",
578
+ "pos": [
579
+ 30,
580
+ 650
581
+ ],
582
+ "size": [
583
+ 330,
584
+ 58
585
+ ],
586
+ "flags": {},
587
+ "order": 5,
588
+ "mode": 0,
589
+ "inputs": [],
590
+ "outputs": [
591
+ {
592
+ "name": "VAE",
593
+ "type": "VAE",
594
+ "links": [
595
+ 27,
596
+ 36
597
+ ]
598
+ }
599
+ ],
600
+ "properties": {
601
+ "cnr_id": "comfy-core",
602
+ "ver": "0.3.51",
603
+ "Node name for S&R": "VAELoader",
604
+ "models": [
605
+ {
606
+ "name": "qwen_image_vae.safetensors",
607
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors",
608
+ "directory": "vae"
609
+ }
610
+ ]
611
+ },
612
+ "widgets_values": [
613
+ "qwen_image_vae.safetensors"
614
+ ]
615
+ },
616
+ {
617
+ "id": 3,
618
+ "type": "KSampler",
619
+ "pos": [
620
+ 1100,
621
+ 280
622
+ ],
623
+ "size": [
624
+ 260,
625
+ 450
626
+ ],
627
+ "flags": {},
628
+ "order": 20,
629
+ "mode": 0,
630
+ "inputs": [
631
+ {
632
+ "name": "model",
633
+ "type": "MODEL",
634
+ "link": 20
635
+ },
636
+ {
637
+ "name": "positive",
638
+ "type": "CONDITIONING",
639
+ "link": 21
640
+ },
641
+ {
642
+ "name": "negative",
643
+ "type": "CONDITIONING",
644
+ "link": 22
645
+ },
646
+ {
647
+ "name": "latent_image",
648
+ "type": "LATENT",
649
+ "link": 44
650
+ }
651
+ ],
652
+ "outputs": [
653
+ {
654
+ "name": "LATENT",
655
+ "type": "LATENT",
656
+ "links": [
657
+ 26
658
+ ]
659
+ }
660
+ ],
661
+ "properties": {
662
+ "cnr_id": "comfy-core",
663
+ "ver": "0.3.51",
664
+ "Node name for S&R": "KSampler"
665
+ },
666
+ "widgets_values": [
667
+ 574037954234946,
668
+ "randomize",
669
+ 4,
670
+ 1,
671
+ "euler",
672
+ "simple",
673
+ 1
674
+ ]
675
+ },
676
+ {
677
+ "id": 83,
678
+ "type": "LoraLoaderModelOnly",
679
+ "pos": [
680
+ 491.74090576171875,
681
+ -148.19822692871094
682
+ ],
683
+ "size": [
684
+ 470,
685
+ 82
686
+ ],
687
+ "flags": {},
688
+ "order": 16,
689
+ "mode": 0,
690
+ "inputs": [
691
+ {
692
+ "name": "model",
693
+ "type": "MODEL",
694
+ "link": 50
695
+ }
696
+ ],
697
+ "outputs": [
698
+ {
699
+ "name": "MODEL",
700
+ "type": "MODEL",
701
+ "links": [
702
+ 48
703
+ ]
704
+ }
705
+ ],
706
+ "properties": {
707
+ "cnr_id": "comfy-core",
708
+ "ver": "0.3.51",
709
+ "Node name for S&R": "LoraLoaderModelOnly",
710
+ "models": [
711
+ {
712
+ "name": "Qwen-Image-Lightning-4steps-V1.0.safetensors",
713
+ "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors",
714
+ "directory": "loras"
715
+ }
716
+ ]
717
+ },
718
+ "widgets_values": [
719
+ "Qwen-Image-Lightning-4steps-V1.0.safetensors",
720
+ 1
721
+ ]
722
+ },
723
+ {
724
+ "id": 79,
725
+ "type": "LoraLoaderModelOnly",
726
+ "pos": [
727
+ 480.61572265625,
728
+ 208.0110626220703
729
+ ],
730
+ "size": [
731
+ 470,
732
+ 82
733
+ ],
734
+ "flags": {},
735
+ "order": 11,
736
+ "mode": 4,
737
+ "inputs": [
738
+ {
739
+ "name": "model",
740
+ "type": "MODEL",
741
+ "link": 45
742
+ }
743
+ ],
744
+ "outputs": [
745
+ {
746
+ "name": "MODEL",
747
+ "type": "MODEL",
748
+ "links": [
749
+ 49
750
+ ]
751
+ }
752
+ ],
753
+ "properties": {
754
+ "cnr_id": "comfy-core",
755
+ "ver": "0.3.51",
756
+ "Node name for S&R": "LoraLoaderModelOnly",
757
+ "models": [
758
+ {
759
+ "name": "Qwen-Image-Lightning-4steps-V1.0.safetensors",
760
+ "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors",
761
+ "directory": "loras"
762
+ }
763
+ ]
764
+ },
765
+ "widgets_values": [
766
+ "qwen_image_infinite_future_lora_v1_000002750.safetensors",
767
+ 1
768
+ ]
769
+ },
770
+ {
771
+ "id": 60,
772
+ "type": "SaveImage",
773
+ "pos": [
774
+ 1496.753173828125,
775
+ 323.2829284667969
776
+ ],
777
+ "size": [
778
+ 1030,
779
+ 1150
780
+ ],
781
+ "flags": {},
782
+ "order": 22,
783
+ "mode": 0,
784
+ "inputs": [
785
+ {
786
+ "name": "images",
787
+ "type": "IMAGE",
788
+ "link": 28
789
+ }
790
+ ],
791
+ "outputs": [],
792
+ "properties": {
793
+ "cnr_id": "comfy-core",
794
+ "ver": "0.3.51"
795
+ },
796
+ "widgets_values": [
797
+ "ComfyUI"
798
+ ]
799
+ },
800
+ {
801
+ "id": 84,
802
+ "type": "LoraLoaderModelOnly",
803
+ "pos": [
804
+ 495.40960693359375,
805
+ 11.900715827941895
806
+ ],
807
+ "size": [
808
+ 470,
809
+ 82
810
+ ],
811
+ "flags": {},
812
+ "order": 13,
813
+ "mode": 0,
814
+ "inputs": [
815
+ {
816
+ "name": "model",
817
+ "type": "MODEL",
818
+ "link": 49
819
+ }
820
+ ],
821
+ "outputs": [
822
+ {
823
+ "name": "MODEL",
824
+ "type": "MODEL",
825
+ "links": [
826
+ 50
827
+ ]
828
+ }
829
+ ],
830
+ "properties": {
831
+ "cnr_id": "comfy-core",
832
+ "ver": "0.3.51",
833
+ "Node name for S&R": "LoraLoaderModelOnly",
834
+ "models": [
835
+ {
836
+ "name": "Qwen-Image-Lightning-4steps-V1.0.safetensors",
837
+ "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors",
838
+ "directory": "loras"
839
+ }
840
+ ]
841
+ },
842
+ "widgets_values": [
843
+ "qwen_image_anime_landscape_lora_v1_000002000.safetensors",
844
+ 1.5
845
+ ]
846
+ },
847
+ {
848
+ "id": 73,
849
+ "type": "LoadImage",
850
+ "pos": [
851
+ 60,
852
+ 860
853
+ ],
854
+ "size": [
855
+ 274.080078125,
856
+ 314.00006103515625
857
+ ],
858
+ "flags": {},
859
+ "order": 6,
860
+ "mode": 0,
861
+ "inputs": [],
862
+ "outputs": [
863
+ {
864
+ "name": "IMAGE",
865
+ "type": "IMAGE",
866
+ "links": [
867
+ 41
868
+ ]
869
+ },
870
+ {
871
+ "name": "MASK",
872
+ "type": "MASK",
873
+ "links": null
874
+ }
875
+ ],
876
+ "properties": {
877
+ "cnr_id": "comfy-core",
878
+ "ver": "0.3.51",
879
+ "Node name for S&R": "LoadImage"
880
+ },
881
+ "widgets_values": [
882
+ "Belgium.jpg",
883
+ "image"
884
+ ]
885
+ },
886
+ {
887
+ "id": 6,
888
+ "type": "CLIPTextEncode",
889
+ "pos": [
890
+ 420,
891
+ 460
892
+ ],
893
+ "size": [
894
+ 400,
895
+ 200
896
+ ],
897
+ "flags": {},
898
+ "order": 8,
899
+ "mode": 0,
900
+ "inputs": [
901
+ {
902
+ "name": "clip",
903
+ "type": "CLIP",
904
+ "link": 24
905
+ }
906
+ ],
907
+ "outputs": [
908
+ {
909
+ "name": "CONDITIONING",
910
+ "type": "CONDITIONING",
911
+ "links": [
912
+ 31
913
+ ]
914
+ }
915
+ ],
916
+ "title": "CLIP Text Encode (Positive Prompt)",
917
+ "properties": {
918
+ "cnr_id": "comfy-core",
919
+ "ver": "0.3.51",
920
+ "Node name for S&R": "CLIPTextEncode"
921
+ },
922
+ "widgets_values": [
923
+ "anime style. This enchanting anime-style illustration reimagines the historic cityscape with a vibrant and magical color palette. The grand Gothic architecture, once muted in beige and light brown, now gleams in a joyful spectrum of hues. The central clock tower stands proudly, painted in soft shades of lavender and peach, while the surrounding row houses are adorned in cheerful pastels—mint green, sky blue, and buttery yellow.\n\nTheir intricate facades and ornate decorations are accentuated with delicate gold and silver highlights, making the entire scene shimmer with life. Above, a brilliant, semi-translucent rainbow arches across a soft blue sky dotted with fluffy white clouds, reflecting the newfound energy of the city below.\n\nThe overall composition retains its majestic and whimsical anime charm, but now feels even more lively and fantastical, as if from a beloved storybook where history and magic playfully coexist."
924
+ ],
925
+ "color": "#232",
926
+ "bgcolor": "#353"
927
+ }
928
+ ],
929
+ "links": [
930
+ [
931
+ 20,
932
+ 66,
933
+ 0,
934
+ 3,
935
+ 0,
936
+ "MODEL"
937
+ ],
938
+ [
939
+ 21,
940
+ 70,
941
+ 0,
942
+ 3,
943
+ 1,
944
+ "CONDITIONING"
945
+ ],
946
+ [
947
+ 22,
948
+ 71,
949
+ 0,
950
+ 3,
951
+ 2,
952
+ "CONDITIONING"
953
+ ],
954
+ [
955
+ 24,
956
+ 38,
957
+ 0,
958
+ 6,
959
+ 0,
960
+ "CLIP"
961
+ ],
962
+ [
963
+ 25,
964
+ 38,
965
+ 0,
966
+ 7,
967
+ 0,
968
+ "CLIP"
969
+ ],
970
+ [
971
+ 26,
972
+ 3,
973
+ 0,
974
+ 8,
975
+ 0,
976
+ "LATENT"
977
+ ],
978
+ [
979
+ 27,
980
+ 39,
981
+ 0,
982
+ 8,
983
+ 1,
984
+ "VAE"
985
+ ],
986
+ [
987
+ 28,
988
+ 8,
989
+ 0,
990
+ 60,
991
+ 0,
992
+ "IMAGE"
993
+ ],
994
+ [
995
+ 30,
996
+ 37,
997
+ 0,
998
+ 69,
999
+ 0,
1000
+ "MODEL"
1001
+ ],
1002
+ [
1003
+ 31,
1004
+ 6,
1005
+ 0,
1006
+ 70,
1007
+ 0,
1008
+ "CONDITIONING"
1009
+ ],
1010
+ [
1011
+ 32,
1012
+ 72,
1013
+ 0,
1014
+ 70,
1015
+ 1,
1016
+ "LATENT"
1017
+ ],
1018
+ [
1019
+ 33,
1020
+ 7,
1021
+ 0,
1022
+ 71,
1023
+ 0,
1024
+ "CONDITIONING"
1025
+ ],
1026
+ [
1027
+ 34,
1028
+ 72,
1029
+ 0,
1030
+ 71,
1031
+ 1,
1032
+ "LATENT"
1033
+ ],
1034
+ [
1035
+ 35,
1036
+ 74,
1037
+ 0,
1038
+ 72,
1039
+ 0,
1040
+ "IMAGE"
1041
+ ],
1042
+ [
1043
+ 36,
1044
+ 39,
1045
+ 0,
1046
+ 72,
1047
+ 1,
1048
+ "VAE"
1049
+ ],
1050
+ [
1051
+ 38,
1052
+ 74,
1053
+ 0,
1054
+ 75,
1055
+ 0,
1056
+ "IMAGE"
1057
+ ],
1058
+ [
1059
+ 41,
1060
+ 73,
1061
+ 0,
1062
+ 77,
1063
+ 0,
1064
+ "IMAGE"
1065
+ ],
1066
+ [
1067
+ 42,
1068
+ 77,
1069
+ 0,
1070
+ 74,
1071
+ 0,
1072
+ "IMAGE"
1073
+ ],
1074
+ [
1075
+ 44,
1076
+ 72,
1077
+ 0,
1078
+ 3,
1079
+ 3,
1080
+ "LATENT"
1081
+ ],
1082
+ [
1083
+ 45,
1084
+ 69,
1085
+ 0,
1086
+ 79,
1087
+ 0,
1088
+ "MODEL"
1089
+ ],
1090
+ [
1091
+ 48,
1092
+ 83,
1093
+ 0,
1094
+ 66,
1095
+ 0,
1096
+ "MODEL"
1097
+ ],
1098
+ [
1099
+ 49,
1100
+ 79,
1101
+ 0,
1102
+ 84,
1103
+ 0,
1104
+ "MODEL"
1105
+ ],
1106
+ [
1107
+ 50,
1108
+ 84,
1109
+ 0,
1110
+ 83,
1111
+ 0,
1112
+ "MODEL"
1113
+ ]
1114
+ ],
1115
+ "groups": [
1116
+ {
1117
+ "id": 1,
1118
+ "title": "Step 1 - Load models",
1119
+ "bounding": [
1120
+ 10,
1121
+ 130,
1122
+ 370,
1123
+ 620
1124
+ ],
1125
+ "color": "#3f789e",
1126
+ "font_size": 24,
1127
+ "flags": {}
1128
+ },
1129
+ {
1130
+ "id": 2,
1131
+ "title": "Step 2 - Upload reference image",
1132
+ "bounding": [
1133
+ 10,
1134
+ 770,
1135
+ 370,
1136
+ 730
1137
+ ],
1138
+ "color": "#3f789e",
1139
+ "font_size": 24,
1140
+ "flags": {}
1141
+ },
1142
+ {
1143
+ "id": 5,
1144
+ "title": "Conditioning",
1145
+ "bounding": [
1146
+ 400,
1147
+ 330,
1148
+ 680,
1149
+ 570
1150
+ ],
1151
+ "color": "#3f789e",
1152
+ "font_size": 24,
1153
+ "flags": {}
1154
+ },
1155
+ {
1156
+ "id": 3,
1157
+ "title": "Step 3 - Prompt",
1158
+ "bounding": [
1159
+ 410,
1160
+ 390,
1161
+ 420,
1162
+ 490
1163
+ ],
1164
+ "color": "#3f789e",
1165
+ "font_size": 24,
1166
+ "flags": {}
1167
+ },
1168
+ {
1169
+ "id": 4,
1170
+ "title": "Image Processing",
1171
+ "bounding": [
1172
+ 410,
1173
+ 920,
1174
+ 410,
1175
+ 573.5999755859375
1176
+ ],
1177
+ "color": "#3f789e",
1178
+ "font_size": 24,
1179
+ "flags": {}
1180
+ },
1181
+ {
1182
+ "id": 6,
1183
+ "title": "4 steps Lightning LoRA",
1184
+ "bounding": [
1185
+ 400,
1186
+ 130,
1187
+ 680,
1188
+ 180
1189
+ ],
1190
+ "color": "#3f789e",
1191
+ "font_size": 24,
1192
+ "flags": {}
1193
+ }
1194
+ ],
1195
+ "config": {},
1196
+ "extra": {
1197
+ "ds": {
1198
+ "scale": 0.6558050095434231,
1199
+ "offset": [
1200
+ 33.24225578156965,
1201
+ 231.82715177716318
1202
+ ]
1203
+ },
1204
+ "frontendVersion": "1.25.10"
1205
+ },
1206
+ "version": 0.4
1207
+ }