Spaces: Running on Zero
Commit: Update app.py (Browse files)
File changed: app.py
@@ -12,13 +12,25 @@ token_hf = os.environ["HF_TOKEN"]

Before (old lines 12-24):

12   dtype = torch.bfloat16
13   device = "cuda" if torch.cuda.is_available() else "cpu"
14
15 - pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=dtype)
16 - pipe.load_lora_weights(hf_hub_download("ByteDance/Hyper-SD", "Hyper-FLUX.1-dev-8steps-lora.safetensors"))
17 - pipe.fuse_lora(lora_scale=0.125)
18 - pipe.to(device="cuda", dtype=dtype)
19
20   # pipe = FluxPipeline.from_pretrained("sayakpaul/FLUX.1-merged", torch_dtype=torch.bfloat16).to(device)
21
22   MAX_SEED = np.iinfo(np.int32).max
23   MAX_IMAGE_SIZE = 2048
24
After (new lines 12-36):

12   dtype = torch.bfloat16
13   device = "cuda" if torch.cuda.is_available() else "cpu"
14
15 + # pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=dtype)
16 + # pipe.load_lora_weights(hf_hub_download("ByteDance/Hyper-SD", "Hyper-FLUX.1-dev-8steps-lora.safetensors"))
17 + # pipe.fuse_lora(lora_scale=0.125)
18 + # pipe.to(device="cuda", dtype=dtype)
19
20   # pipe = FluxPipeline.from_pretrained("sayakpaul/FLUX.1-merged", torch_dtype=torch.bfloat16).to(device)
21
22 + model_id = "black-forest-labs/FLUX.1-dev"
23 + adapter_id = "alimama-creative/FLUX.1-Turbo-Alpha"
24 +
25 + pipe = FluxPipeline.from_pretrained(
26 +     model_id,
27 +     torch_dtype=dtype
28 + )
29 + pipe.to(device)
30 +
31 + pipe.load_lora_weights(adapter_id)
32 + pipe.fuse_lora()
33 +
34   MAX_SEED = np.iinfo(np.int32).max
35   MAX_IMAGE_SIZE = 2048
36