Update pipeline.py
pipeline.py  (+0 -3)  CHANGED
@@ -499,8 +499,6 @@ class FluxWithCFGPipeline(DiffusionPipeline, FluxLoraLoaderMixin, FromSingleFile
         cfg: Optional[bool] = True,
         return_dict: bool = True,
         joint_attention_kwargs: Optional[Dict[str, Any]] = None,
-        callback_on_step_end: Optional[Callable[[int, int, Dict], None]] = None,
-        callback_on_step_end_tensor_inputs: List[str] = ["latents"],
         max_sequence_length: int = 512,
     ):
         height = height or self.default_sample_size * self.vae_scale_factor
@@ -514,7 +512,6 @@ class FluxWithCFGPipeline(DiffusionPipeline, FluxLoraLoaderMixin, FromSingleFile
             width,
             prompt_embeds=prompt_embeds,
             pooled_prompt_embeds=pooled_prompt_embeds,
-            callback_on_step_end_tensor_inputs=callback_on_step_end_tensor_inputs,
             max_sequence_length=max_sequence_length,
         )
 
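For context, a minimal usage sketch of calling the pipeline after this change. This is not part of the commit: the checkpoint path is a placeholder, the prompt keyword and the .images attribute on the return value are assumptions, and only cfg / max_sequence_length are taken from the signature shown in the hunks above.

# Usage sketch (assumptions noted inline); the removed callback keywords
# must no longer be passed after this commit.
import torch
from pipeline import FluxWithCFGPipeline  # the module changed in this commit

pipe = FluxWithCFGPipeline.from_pretrained(
    "path/to/flux-checkpoint",   # placeholder: whatever checkpoint the Space loads
    torch_dtype=torch.bfloat16,
)
pipe.to("cuda")

out = pipe(
    prompt="a photo of a forest at dawn",  # assumed parameter name
    cfg=True,                 # kept in the signature: classifier-free guidance toggle
    max_sequence_length=512,  # kept in the signature
    # callback_on_step_end=...                         <- removed in this commit
    # callback_on_step_end_tensor_inputs=["latents"]   <- removed in this commit
)
image = out.images[0]  # assumes a FluxPipelineOutput-style object with .images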