File tree — 2 files changed: +15 −1 lines changed
src/diffusers/pipelines/flux
Diff hunk: @@ -790,7 +790,10 @@ def __call__(
790
790
lora_scale = (
791
791
self .joint_attention_kwargs .get ("scale" , None ) if self .joint_attention_kwargs is not None else None
792
792
)
793
- do_true_cfg = true_cfg_scale > 1 and negative_prompt is not None
793
+ has_neg_prompt = negative_prompt is not None or (
794
+ negative_prompt_embeds is not None and negative_pooled_prompt_embeds is not None
795
+ )
796
+ do_true_cfg = true_cfg_scale > 1 and has_neg_prompt
794
797
(
795
798
prompt_embeds ,
796
799
pooled_prompt_embeds ,
Original file line number Diff line number Diff line change @@ -209,6 +209,17 @@ def test_flux_image_output_shape(self):
209
209
output_height , output_width , _ = image .shape
210
210
assert (output_height , output_width ) == (expected_height , expected_width )
211
211
212
def test_flux_true_cfg(self):
    """Enabling true CFG (``true_cfg_scale`` > 1 plus a negative prompt) must alter the output.

    Runs the pipeline twice from the same seed — once without true CFG and once
    with it — and asserts the generated images differ.
    """
    components = self.get_dummy_components()
    pipe = self.pipeline_class(**components).to(torch_device)

    inputs = self.get_dummy_inputs(torch_device)
    # Remove the fixture generator so both runs below are seeded identically.
    inputs.pop("generator")

    baseline_image = pipe(**inputs, generator=torch.manual_seed(0)).images[0]

    inputs["negative_prompt"] = "bad quality"
    inputs["true_cfg_scale"] = 2.0
    cfg_image = pipe(**inputs, generator=torch.manual_seed(0)).images[0]

    assert not np.allclose(baseline_image, cfg_image)
212
223
213
224
@nightly
214
225
@require_big_gpu_with_torch_cuda
You can’t perform that action at this time.
0 commit comments