Spaces: Running on Zero
from diffusers import DiffusionPipeline
from diffusers import AutoPipelineForText2Image
from diffusers import StableCascadeDecoderPipeline, StableCascadePriorPipeline
import torch


def load_huggingface_model(model_name, model_type):
    # model_type is currently unused; all supported models here are text-to-image.
    if model_name == "SD-turbo":
        pipe = AutoPipelineForText2Image.from_pretrained("stabilityai/sd-turbo", torch_dtype=torch.float16, variant="fp16")
        pipe = pipe.to("cuda")
    elif model_name == "SDXL-turbo":
        pipe = AutoPipelineForText2Image.from_pretrained("stabilityai/sdxl-turbo", torch_dtype=torch.float16, variant="fp16")
        pipe = pipe.to("cuda")
    elif model_name == "Stable-cascade":
        # Stable Cascade is a two-stage model: the prior produces image embeddings,
        # and the decoder turns those embeddings into the final image.
        prior = StableCascadePriorPipeline.from_pretrained("stabilityai/stable-cascade-prior", variant="bf16", torch_dtype=torch.bfloat16)
        decoder = StableCascadeDecoderPipeline.from_pretrained("stabilityai/stable-cascade", variant="bf16", torch_dtype=torch.float16)
        pipe = [prior, decoder]
    else:
        raise NotImplementedError(f"Unsupported model: {model_name}")
    # CPU-only variant kept for reference:
    # if model_name == "SD-turbo":
    #     pipe = AutoPipelineForText2Image.from_pretrained("stabilityai/sd-turbo")
    # elif model_name == "SDXL-turbo":
    #     pipe = AutoPipelineForText2Image.from_pretrained("stabilityai/sdxl-turbo")
    # else:
    #     raise NotImplementedError
    # pipe = pipe.to("cpu")
    return pipe


if __name__ == "__main__":
    # Download and cache the text-to-image pipelines ahead of time.
    for name in ["SD-turbo", "SDXL-turbo"]:
        pipe = load_huggingface_model(name, "text2image")
    # DeepFloyd IF variant kept for reference:
    # for name in ["IF-I-XL-v1.0"]:
    #     pipe = load_huggingface_model(name, 'text2image')
    #     pipe = DiffusionPipeline.from_pretrained("DeepFloyd/IF-I-XL-v1.0", variant="fp16", torch_dtype=torch.float16)
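A rough usage sketch, not part of the original file, showing how the pipelines returned by load_huggingface_model might be driven. It assumes the imports and loader defined above; the prompt text, step counts, guidance scales, output filenames, and the .to("cuda") calls for the cascade stages are assumptions based on the usual diffusers examples for these models, not something the loader itself guarantees.

# Hypothetical usage sketch (assumed values marked in comments).
prompt = "a photo of an astronaut riding a horse"  # assumed example prompt

# SD-turbo / SDXL-turbo: single-step sampling with guidance disabled.
pipe = load_huggingface_model("SDXL-turbo", "text2image")
image = pipe(prompt=prompt, num_inference_steps=1, guidance_scale=0.0).images[0]
image.save("sdxl_turbo.png")  # assumed output path

# Stable Cascade: run the prior first, then feed its image embeddings to the decoder.
prior, decoder = load_huggingface_model("Stable-cascade", "text2image")
prior = prior.to("cuda")      # assumed: the loader leaves both cascade stages on CPU
decoder = decoder.to("cuda")
prior_output = prior(prompt=prompt, guidance_scale=4.0, num_inference_steps=20)
image = decoder(
    image_embeddings=prior_output.image_embeddings.to(torch.float16),  # decoder runs in fp16
    prompt=prompt,
    guidance_scale=0.0,
    num_inference_steps=10,
).images[0]
image.save("stable_cascade.png")  # assumed output path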