Spaces: Running on Zero
Update pipeline.py
pipeline.py CHANGED (+1 -5)
@@ -81,6 +81,7 @@ class FluxWithCFGPipeline(StableDiffusion3Pipeline):
         text_encoder_3: None,
     ):
         super().__init__()
+
         self.register_modules(
             vae=vae,
             text_encoder=text_encoder,
@@ -100,11 +101,6 @@ class FluxWithCFGPipeline(StableDiffusion3Pipeline):
             self.tokenizer.model_max_length if hasattr(self, "tokenizer") and self.tokenizer is not None else 77
         )
         self.default_sample_size = 64
-
-        _optional_components = [transformer, scheduler, vae, text_encoder, text_encoder_2, text_encoder_3, tokenizer, tokenizer_2, tokenizer_3]
-        model_cpu_offload_seq = "text_encoder->text_encoder_2->text_encoder_3->transformer->vae"
-        _callback_tensor_inputs = ["latents", "prompt_embeds", "negative_prompt_embeds", "negative_pooled_prompt_embeds"]
-
     def __call__(
         self,
         prompt: Union[str, List[str]] = None,
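For context, the five removed lines were dead code: inside __init__, binding lists of module objects to bare local names such as _optional_components has no effect on the pipeline instance, which is presumably why this commit drops them (the only addition is a blank line after super().__init__()). In diffusers, settings like these are normally declared as class-level attributes that refer to components by name. The sketch below illustrates that convention only; it is an assumed, minimal reconstruction, not this Space's actual code, and the attribute values simply mirror the removed lines converted to strings.

# Minimal sketch (assumed): the usual diffusers pattern is to declare these
# as class-level attributes holding component *names* as strings, rather than
# creating throwaway local variables inside __init__.
from diffusers import StableDiffusion3Pipeline


class FluxWithCFGPipeline(StableDiffusion3Pipeline):
    # Component names; values here just mirror the removed lines, as strings.
    _optional_components = [
        "transformer", "scheduler", "vae",
        "text_encoder", "text_encoder_2", "text_encoder_3",
        "tokenizer", "tokenizer_2", "tokenizer_3",
    ]
    # Order in which modules are moved on/off the GPU during CPU offload.
    model_cpu_offload_seq = "text_encoder->text_encoder_2->text_encoder_3->transformer->vae"
    # Tensors exposed to step callbacks during denoising.
    _callback_tensor_inputs = [
        "latents", "prompt_embeds",
        "negative_prompt_embeds", "negative_pooled_prompt_embeds",
    ]

If the base StableDiffusion3Pipeline already supplies suitable defaults for these attributes, dropping the dead assignments changes nothing at runtime.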