From 1e5f0f66be93182f30d5d453dc8df50f3f7c1826 Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Fri, 23 Feb 2024 12:21:20 -0500
Subject: [PATCH] Support lora keys with lora_prior_unet_ and lora_prior_te_

---
 comfy/lora.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/comfy/lora.py b/comfy/lora.py
index 5e4009b47f9..82d57fe1306 100644
--- a/comfy/lora.py
+++ b/comfy/lora.py
@@ -197,6 +197,8 @@ def model_lora_keys_clip(model, key_map={}):
                     key_map[lora_key] = k
                     lora_key = "text_encoder.text_model.encoder.layers.{}.{}".format(b, c) #diffusers lora
                     key_map[lora_key] = k
+                    lora_key = "lora_prior_te_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) #cascade lora: TODO put lora key prefix in the model config
+                    key_map[lora_key] = k
 
 
     return key_map
@@ -207,6 +209,7 @@ def model_lora_keys_unet(model, key_map={}):
         if k.startswith("diffusion_model.") and k.endswith(".weight"):
            key_lora = k[len("diffusion_model."):-len(".weight")].replace(".", "_")
            key_map["lora_unet_{}".format(key_lora)] = k
+            key_map["lora_prior_unet_{}".format(key_lora)] = k #cascade lora: TODO put lora key prefix in the model config
 
     diffusers_keys = comfy.utils.unet_to_diffusers(model.model_config.unet_config)
     for k in diffusers_keys:
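
For context, a minimal sketch (not ComfyUI's actual loader) of how a key map like the one patched above might be consumed when a Stable Cascade LoRA is applied. The helper names build_unet_key_map_sketch and match_lora_keys, and the kohya-style ".lora_up.weight"/".lora_down.weight" key suffixes, are illustrative assumptions, not code from this patch:

# Minimal sketch, assuming kohya-style LoRA files where each logical key
# has ".lora_up.weight" and ".lora_down.weight" tensors. The function names
# below are hypothetical and not part of comfy/lora.py.

def build_unet_key_map_sketch(model_weight_names):
    # Mirror the patched mapping: the same model weight is reachable through
    # both the "lora_unet_" and the Stable Cascade "lora_prior_unet_" prefix.
    key_map = {}
    for k in model_weight_names:
        if k.startswith("diffusion_model.") and k.endswith(".weight"):
            key_lora = k[len("diffusion_model."):-len(".weight")].replace(".", "_")
            key_map["lora_unet_{}".format(key_lora)] = k
            key_map["lora_prior_unet_{}".format(key_lora)] = k
    return key_map

def match_lora_keys(lora_state_dict, key_map):
    # Pair each mapped LoRA key with its up/down tensors, the way a loader
    # would before patching a weight with W + up @ down.
    patches = {}
    for lora_key, model_key in key_map.items():
        up = lora_state_dict.get("{}.lora_up.weight".format(lora_key))
        down = lora_state_dict.get("{}.lora_down.weight".format(lora_key))
        if up is not None and down is not None:
            patches[model_key] = (up, down)
    return patches

With the extra "lora_prior_unet_{}" entry in the map, a Cascade prior LoRA whose file keys start with "lora_prior_unet_" resolves to the same "diffusion_model.*.weight" targets as a standard "lora_unet_" file, which is all this patch needs until the TODO of moving the prefix into the model config is addressed.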