nvn04 committed
Commit ed299f7 · verified · 1 parent: e57c12d

Update app.py

Files changed (1)
  1. app.py +0 -241
app.py CHANGED
@@ -383,247 +383,6 @@ def app_gradio():
                 result_image,
             )
 
-        with gr.Tab("Mask-based & Flux.1 Fill Dev"):
-            with gr.Row():
-                with gr.Column(scale=1, min_width=350):
-                    with gr.Row():
-                        image_path_flux = gr.Image(
-                            type="filepath",
-                            interactive=True,
-                            visible=False,
-                        )
-                        person_image_flux = gr.ImageEditor(
-                            interactive=True, label="Person Image", type="filepath"
-                        )
-
-                    with gr.Row():
-                        with gr.Column(scale=1, min_width=230):
-                            cloth_image_flux = gr.Image(
-                                interactive=True, label="Condition Image", type="filepath"
-                            )
-                        with gr.Column(scale=1, min_width=120):
-                            gr.Markdown(
-                                '<span style="color: #808080; font-size: small;">Two ways to provide Mask:<br>1. Upload the person image and use the `🖌️` above to draw the Mask (higher priority)<br>2. Select the `Try-On Cloth Type` to generate automatically </span>'
-                            )
-                            cloth_type = gr.Radio(
-                                label="Try-On Cloth Type",
-                                choices=["upper", "lower", "overall"],
-                                value="upper",
-                            )
-
-                    submit_flux = gr.Button("Submit")
-                    gr.Markdown(
-                        '<center><span style="color: #FF0000">!!! Click only Once, Wait for Delay !!!</span></center>'
-                    )
-
-                    with gr.Accordion("Advanced Options", open=False):
-                        num_inference_steps_flux = gr.Slider(
-                            label="Inference Step", minimum=10, maximum=100, step=5, value=50
-                        )
-                        # Guidence Scale
-                        guidance_scale_flux = gr.Slider(
-                            label="CFG Strenth", minimum=0.0, maximum=50, step=0.5, value=30
-                        )
-                        # Random Seed
-                        seed_flux = gr.Slider(
-                            label="Seed", minimum=-1, maximum=10000, step=1, value=42
-                        )
-                        show_type = gr.Radio(
-                            label="Show Type",
-                            choices=["result only", "input & result", "input & mask & result"],
-                            value="input & mask & result",
-                        )
-
-                with gr.Column(scale=2, min_width=500):
-                    result_image_flux = gr.Image(interactive=False, label="Result")
-                    with gr.Row():
-                        # Photo Examples
-                        root_path = "resource/demo/example"
-                        with gr.Column():
-                            gr.Examples(
-                                examples=[
-                                    os.path.join(root_path, "person", "men", _)
-                                    for _ in os.listdir(os.path.join(root_path, "person", "men"))
-                                ],
-                                examples_per_page=4,
-                                inputs=image_path_flux,
-                                label="Person Examples ①",
-                            )
-                            gr.Examples(
-                                examples=[
-                                    os.path.join(root_path, "person", "women", _)
-                                    for _ in os.listdir(os.path.join(root_path, "person", "women"))
-                                ],
-                                examples_per_page=4,
-                                inputs=image_path_flux,
-                                label="Person Examples ②",
-                            )
-                            gr.Markdown(
-                                '<span style="color: #808080; font-size: small;">*Person examples come from the demos of <a href="https://huggingface.co/spaces/levihsu/OOTDiffusion">OOTDiffusion</a> and <a href="https://www.outfitanyone.org">OutfitAnyone</a>. </span>'
-                            )
-                        with gr.Column():
-                            gr.Examples(
-                                examples=[
-                                    os.path.join(root_path, "condition", "upper", _)
-                                    for _ in os.listdir(os.path.join(root_path, "condition", "upper"))
-                                ],
-                                examples_per_page=4,
-                                inputs=cloth_image_flux,
-                                label="Condition Upper Examples",
-                            )
-                            gr.Examples(
-                                examples=[
-                                    os.path.join(root_path, "condition", "overall", _)
-                                    for _ in os.listdir(os.path.join(root_path, "condition", "overall"))
-                                ],
-                                examples_per_page=4,
-                                inputs=cloth_image_flux,
-                                label="Condition Overall Examples",
-                            )
-                            condition_person_exm = gr.Examples(
-                                examples=[
-                                    os.path.join(root_path, "condition", "person", _)
-                                    for _ in os.listdir(os.path.join(root_path, "condition", "person"))
-                                ],
-                                examples_per_page=4,
-                                inputs=cloth_image_flux,
-                                label="Condition Reference Person Examples",
-                            )
-                            gr.Markdown(
-                                '<span style="color: #808080; font-size: small;">*Condition examples come from the Internet. </span>'
-                            )
-
-
-            image_path_flux.change(
-                person_example_fn, inputs=image_path_flux, outputs=person_image_flux
-            )
-
-            submit_flux.click(
-                submit_function_flux,
-                [person_image_flux, cloth_image_flux, cloth_type, num_inference_steps_flux, guidance_scale_flux, seed_flux, show_type],
-                result_image_flux,
-            )
-
-
-        with gr.Tab("Mask-free & SD1.5"):
-            with gr.Row():
-                with gr.Column(scale=1, min_width=350):
-                    with gr.Row():
-                        image_path_p2p = gr.Image(
-                            type="filepath",
-                            interactive=True,
-                            visible=False,
-                        )
-                        person_image_p2p = gr.ImageEditor(
-                            interactive=True, label="Person Image", type="filepath"
-                        )
-
-                    with gr.Row():
-                        with gr.Column(scale=1, min_width=230):
-                            cloth_image_p2p = gr.Image(
-                                interactive=True, label="Condition Image", type="filepath"
-                            )
-
-                    submit_p2p = gr.Button("Submit")
-                    gr.Markdown(
-                        '<center><span style="color: #FF0000">!!! Click only Once, Wait for Delay !!!</span></center>'
-                    )
-
-                    gr.Markdown(
-                        '<span style="color: #808080; font-size: small;">Advanced options can adjust details:<br>1. `Inference Step` may enhance details;<br>2. `CFG` is highly correlated with saturation;<br>3. `Random seed` may improve pseudo-shadow.</span>'
-                    )
-                    with gr.Accordion("Advanced Options", open=False):
-                        num_inference_steps_p2p = gr.Slider(
-                            label="Inference Step", minimum=10, maximum=100, step=5, value=50
-                        )
-                        # Guidence Scale
-                        guidance_scale_p2p = gr.Slider(
-                            label="CFG Strenth", minimum=0.0, maximum=7.5, step=0.5, value=2.5
-                        )
-                        # Random Seed
-                        seed_p2p = gr.Slider(
-                            label="Seed", minimum=-1, maximum=10000, step=1, value=42
-                        )
-                        # show_type = gr.Radio(
-                        #     label="Show Type",
-                        #     choices=["result only", "input & result", "input & mask & result"],
-                        #     value="input & mask & result",
-                        # )
-
-                with gr.Column(scale=2, min_width=500):
-                    result_image_p2p = gr.Image(interactive=False, label="Result")
-                    with gr.Row():
-                        # Photo Examples
-                        root_path = "resource/demo/example"
-                        with gr.Column():
-                            gr.Examples(
-                                examples=[
-                                    os.path.join(root_path, "person", "men", _)
-                                    for _ in os.listdir(os.path.join(root_path, "person", "men"))
-                                ],
-                                examples_per_page=4,
-                                inputs=image_path_p2p,
-                                label="Person Examples ①",
-                            )
-                            gr.Examples(
-                                examples=[
-                                    os.path.join(root_path, "person", "women", _)
-                                    for _ in os.listdir(os.path.join(root_path, "person", "women"))
-                                ],
-                                examples_per_page=4,
-                                inputs=image_path_p2p,
-                                label="Person Examples ②",
-                            )
-                            gr.Markdown(
-                                '<span style="color: #808080; font-size: small;">*Person examples come from the demos of <a href="https://huggingface.co/spaces/levihsu/OOTDiffusion">OOTDiffusion</a> and <a href="https://www.outfitanyone.org">OutfitAnyone</a>. </span>'
-                            )
-                        with gr.Column():
-                            gr.Examples(
-                                examples=[
-                                    os.path.join(root_path, "condition", "upper", _)
-                                    for _ in os.listdir(os.path.join(root_path, "condition", "upper"))
-                                ],
-                                examples_per_page=4,
-                                inputs=cloth_image_p2p,
-                                label="Condition Upper Examples",
-                            )
-                            gr.Examples(
-                                examples=[
-                                    os.path.join(root_path, "condition", "overall", _)
-                                    for _ in os.listdir(os.path.join(root_path, "condition", "overall"))
-                                ],
-                                examples_per_page=4,
-                                inputs=cloth_image_p2p,
-                                label="Condition Overall Examples",
-                            )
-                            condition_person_exm = gr.Examples(
-                                examples=[
-                                    os.path.join(root_path, "condition", "person", _)
-                                    for _ in os.listdir(os.path.join(root_path, "condition", "person"))
-                                ],
-                                examples_per_page=4,
-                                inputs=cloth_image_p2p,
-                                label="Condition Reference Person Examples",
-                            )
-                            gr.Markdown(
-                                '<span style="color: #808080; font-size: small;">*Condition examples come from the Internet. </span>'
-                            )
-
-            image_path_p2p.change(
-                person_example_fn, inputs=image_path_p2p, outputs=person_image_p2p
-            )
-
-            submit_p2p.click(
-                submit_function_p2p,
-                [
-                    person_image_p2p,
-                    cloth_image_p2p,
-                    num_inference_steps_p2p,
-                    guidance_scale_p2p,
-                    seed_p2p],
-                result_image_p2p,
-            )
-
     demo.queue().launch(share=True, show_error=True)
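
Both removed tabs rely on the same Gradio event wiring: the input components are collected into a list and passed positionally to a handler through Button.click, and the handler's return value populates the output gr.Image. Below is a minimal, self-contained sketch of that pattern; the handler run_tryon and its placeholder body are illustrative assumptions, not code from app.py.

import gradio as gr

# Illustrative handler: a real implementation would run the try-on pipeline here.
def run_tryon(person_path, cloth_path, steps, cfg, seed):
    # Arguments arrive in the same order as the components listed in click().
    return person_path  # placeholder output

with gr.Blocks() as demo:
    person = gr.Image(type="filepath", label="Person Image")
    cloth = gr.Image(type="filepath", label="Condition Image")
    steps = gr.Slider(label="Inference Step", minimum=10, maximum=100, step=5, value=50)
    cfg = gr.Slider(label="CFG Strength", minimum=0.0, maximum=50.0, step=0.5, value=30.0)
    seed = gr.Slider(label="Seed", minimum=-1, maximum=10000, step=1, value=42)
    result = gr.Image(interactive=False, label="Result")
    submit = gr.Button("Submit")

    # The list order defines run_tryon's positional arguments;
    # the single output component receives its return value.
    submit.click(run_tryon, [person, cloth, steps, cfg, seed], result)

if __name__ == "__main__":
    demo.queue().launch()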