From 1b61c34e6bb1d36d26664cd12177395e0c9f04ba Mon Sep 17 00:00:00 2001
From: Colle
Date: Thu, 26 Jun 2025 18:51:33 +0200
Subject: [PATCH] Flux: pass joint_attention_kwargs when gradient_checkpointing

---
 src/diffusers/models/transformers/transformer_flux.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/diffusers/models/transformers/transformer_flux.py b/src/diffusers/models/transformers/transformer_flux.py
index 3af1de2ad0be..32379a559c34 100644
--- a/src/diffusers/models/transformers/transformer_flux.py
+++ b/src/diffusers/models/transformers/transformer_flux.py
@@ -485,6 +485,7 @@ def forward(
                     encoder_hidden_states,
                     temb,
                     image_rotary_emb,
+                    joint_attention_kwargs,
                 )
 
             else:
@@ -516,6 +517,7 @@ def forward(
                     hidden_states,
                     temb,
                     image_rotary_emb,
+                    joint_attention_kwargs,
                 )
 
             else:
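
Why this change is needed (reviewer note, not part of the patch): when
gradient checkpointing is enabled, each transformer block is invoked through
the checkpointing helper, which forwards only the positional arguments it is
given. Any argument left out falls back to the block's default of None, so
joint_attention_kwargs was silently dropped whenever checkpointing was
active. Below is a minimal, self-contained sketch of that failure mode using
plain torch.utils.checkpoint and a hypothetical ToyBlock; it is not the
diffusers code itself.

    import torch
    import torch.nn as nn
    from torch.utils.checkpoint import checkpoint


    class ToyBlock(nn.Module):
        # Stand-in for a transformer block: an optional kwargs dict,
        # defaulting to None, modulates the output.
        def forward(self, hidden_states, temb, joint_attention_kwargs=None):
            scale = (joint_attention_kwargs or {}).get("scale", 1.0)
            return hidden_states * scale + temb


    block = ToyBlock()
    hidden_states = torch.randn(2, 4, requires_grad=True)
    temb = torch.randn(2, 4)
    kwargs = {"scale": 0.5}

    # Without checkpointing, the kwargs reach the block.
    out_eager = block(hidden_states, temb, kwargs)

    # With checkpointing, only the arguments actually passed to
    # checkpoint() are forwarded; kwargs fall back to None (the bug).
    out_dropped = checkpoint(block, hidden_states, temb, use_reentrant=False)

    # The fix: pass the kwargs through as one more positional argument,
    # exactly what the two added lines in the patch do.
    out_fixed = checkpoint(block, hidden_states, temb, kwargs, use_reentrant=False)

    assert torch.allclose(out_eager, out_fixed)
    assert not torch.allclose(out_eager, out_dropped)

The patch applies the same one-line fix at both checkpointed call sites: the
dual-stream blocks (first hunk, which also take encoder_hidden_states) and
the single-stream blocks (second hunk).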