From e19915e0b76c1518f2beb1967c6eca6381658e02 Mon Sep 17 00:00:00 2001
From: yiyixuxu
Date: Tue, 18 Jun 2024 02:38:12 +0000
Subject: [PATCH 1/3] add

---
 src/diffusers/models/attention.py                    | 4 ++--
 src/diffusers/models/transformers/transformer_sd3.py | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/diffusers/models/attention.py b/src/diffusers/models/attention.py
index e19b087431a2..2a81f357d48b 100644
--- a/src/diffusers/models/attention.py
+++ b/src/diffusers/models/attention.py
@@ -128,9 +128,9 @@ def __init__(self, dim, num_attention_heads, attention_head_dim, context_pre_onl
             query_dim=dim,
             cross_attention_dim=None,
             added_kv_proj_dim=dim,
-            dim_head=attention_head_dim // num_attention_heads,
+            dim_head=attention_head_dim,
             heads=num_attention_heads,
-            out_dim=attention_head_dim,
+            out_dim=dim,
             context_pre_only=context_pre_only,
             bias=True,
             processor=processor,

diff --git a/src/diffusers/models/transformers/transformer_sd3.py b/src/diffusers/models/transformers/transformer_sd3.py
index 4b159511e25d..566bfdc1c6c0 100644
--- a/src/diffusers/models/transformers/transformer_sd3.py
+++ b/src/diffusers/models/transformers/transformer_sd3.py
@@ -95,7 +95,7 @@ def __init__(
                 JointTransformerBlock(
                     dim=self.inner_dim,
                     num_attention_heads=self.config.num_attention_heads,
-                    attention_head_dim=self.inner_dim,
+                    attention_head_dim=self.config.attention_head_dim,
                     context_pre_only=i == num_layers - 1,
                 )
                 for i in range(self.config.num_layers)

From 05152f0df270b6cbfcdc6c40b627e2c5131bfbc9 Mon Sep 17 00:00:00 2001
From: yiyixuxu
Date: Fri, 28 Jun 2024 20:38:35 +0000
Subject: [PATCH 2/3] update sd3 controlnet

---
 src/diffusers/models/controlnet_sd3.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/diffusers/models/controlnet_sd3.py b/src/diffusers/models/controlnet_sd3.py
index 629cb661eda5..0decec482890 100644
--- a/src/diffusers/models/controlnet_sd3.py
+++ b/src/diffusers/models/controlnet_sd3.py
@@ -81,7 +81,7 @@ def __init__(
                 JointTransformerBlock(
                     dim=self.inner_dim,
                     num_attention_heads=num_attention_heads,
-                    attention_head_dim=self.inner_dim,
+                    attention_head_dim=attention_head_dim,
                     context_pre_only=False,
                 )
                 for i in range(num_layers)

From 1796a114e75f35949fdd746ab90bc2cf20738f39 Mon Sep 17 00:00:00 2001
From: YiYi Xu
Date: Mon, 1 Jul 2024 06:43:22 -1000
Subject: [PATCH 3/3] Update src/diffusers/models/controlnet_sd3.py

---
 src/diffusers/models/controlnet_sd3.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/diffusers/models/controlnet_sd3.py b/src/diffusers/models/controlnet_sd3.py
index 0decec482890..4757670b55ed 100644
--- a/src/diffusers/models/controlnet_sd3.py
+++ b/src/diffusers/models/controlnet_sd3.py
@@ -81,7 +81,7 @@ def __init__(
                 JointTransformerBlock(
                     dim=self.inner_dim,
                     num_attention_heads=num_attention_heads,
-                    attention_head_dim=attention_head_dim,
+                    attention_head_dim=self.config.attention_head_dim,
                     context_pre_only=False,
                 )
                 for i in range(num_layers)
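
Net effect of the series: JointTransformerBlock now receives the true per-head
dimension (config.attention_head_dim) instead of the full inner dimension, and
the Attention module's dim_head/out_dim arguments are derived accordingly. The
sketch below is illustrative only and not part of the patches; the concrete
numbers are assumed (SD3-medium-like values), and the dicts stand in for the
real Attention kwargs. It checks that the old and new argument conventions
resolve to the same effective shapes, i.e. the change fixes misleading naming
without altering behavior.

# Minimal sketch, assuming inner_dim = num_attention_heads * attention_head_dim
# as in the SD3 transformer config. Values are hypothetical.
num_attention_heads = 24
attention_head_dim = 64
inner_dim = num_attention_heads * attention_head_dim  # 1536; this is `dim`

# Old convention: the block was handed inner_dim under the name
# "attention_head_dim" and divided it back down per head.
old = {
    "dim_head": inner_dim // num_attention_heads,  # 1536 // 24 == 64
    "heads": num_attention_heads,
    "out_dim": inner_dim,                          # 1536
}

# New convention: the block is handed the per-head dim directly and the
# output projection is expressed as `dim`.
new = {
    "dim_head": attention_head_dim,  # 64
    "heads": num_attention_heads,
    "out_dim": inner_dim,            # `dim` == 1536
}

assert old == new  # identical shapes; only the argument semantics changed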