From 5058d27f128a01dd06ac0fb86bfcfda078051883 Mon Sep 17 00:00:00 2001
From: Charchit Sharma
Date: Fri, 1 Dec 2023 20:56:58 +0530
Subject: [PATCH] added attention_head_dim, attention_type, resolution_idx
 (#6011)

---
 .../pipelines/versatile_diffusion/modeling_text_unet.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py b/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py
index a940cec5e46a..0a2f1ca17cb0 100644
--- a/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py
+++ b/src/diffusers/pipelines/versatile_diffusion/modeling_text_unet.py
@@ -50,6 +50,9 @@ def get_down_block(
     resnet_eps,
     resnet_act_fn,
     num_attention_heads,
+    transformer_layers_per_block,
+    attention_type,
+    attention_head_dim,
     resnet_groups=None,
     cross_attention_dim=None,
     downsample_padding=None,
@@ -113,6 +116,10 @@ def get_up_block(
     resnet_eps,
     resnet_act_fn,
     num_attention_heads,
+    transformer_layers_per_block,
+    resolution_idx,
+    attention_type,
+    attention_head_dim,
     resnet_groups=None,
     cross_attention_dim=None,
     dual_cross_attention=False,
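
For context, a minimal sketch of the call pattern these signature changes touch: a get_down_block-style factory that dispatches on a block-type string and forwards per-block attention hyperparameters (transformer_layers_per_block, attention_type, attention_head_dim) to the block it constructs. The block classes, argument values, and dispatch below are illustrative stand-ins under stated assumptions, not the actual diffusers implementation; get_up_block follows the same pattern with an additional resolution_idx argument.

# Illustrative sketch only: toy stand-ins for the real block classes.
from dataclasses import dataclass


@dataclass
class DownBlockFlat:
    num_layers: int
    in_channels: int
    out_channels: int


@dataclass
class CrossAttnDownBlockFlat:
    num_layers: int
    in_channels: int
    out_channels: int
    num_attention_heads: int
    transformer_layers_per_block: int
    attention_type: str
    attention_head_dim: int


def get_down_block(
    down_block_type,
    num_layers,
    in_channels,
    out_channels,
    num_attention_heads,
    transformer_layers_per_block,
    attention_type,
    attention_head_dim,
):
    # Dispatch on the block-type string and forward only the arguments
    # that the chosen block actually consumes.
    if down_block_type == "DownBlockFlat":
        return DownBlockFlat(num_layers, in_channels, out_channels)
    if down_block_type == "CrossAttnDownBlockFlat":
        return CrossAttnDownBlockFlat(
            num_layers,
            in_channels,
            out_channels,
            num_attention_heads,
            transformer_layers_per_block,
            attention_type,
            attention_head_dim,
        )
    raise ValueError(f"{down_block_type} does not exist.")


if __name__ == "__main__":
    # Hypothetical values, chosen only to exercise the new keyword arguments.
    block = get_down_block(
        "CrossAttnDownBlockFlat",
        num_layers=2,
        in_channels=320,
        out_channels=640,
        num_attention_heads=8,
        transformer_layers_per_block=1,
        attention_type="default",
        attention_head_dim=40,
    )
    print(block)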