
Support lora keys with lora_prior_unet_ and lora_prior_te_
comfyanonymous committed Feb 23, 2024
1 parent e1cb93c · commit 1e5f0f6
Showing 1 changed file with 3 additions and 0 deletions.
comfy/lora.py: 3 additions & 0 deletions
@@ -197,6 +197,8 @@ def model_lora_keys_clip(model, key_map={}):
                 key_map[lora_key] = k
                 lora_key = "text_encoder.text_model.encoder.layers.{}.{}".format(b, c) #diffusers lora
                 key_map[lora_key] = k
+                lora_key = "lora_prior_te_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c]) #cascade lora: TODO put lora key prefix in the model config
+                key_map[lora_key] = k
 
     return key_map
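
For context, here is a minimal standalone sketch of how these new text-encoder names are formed. This is not the ComfyUI code: the LORA_CLIP_MAP entries are an illustrative subset of the real mapping in comfy/lora.py, and the clip_g-style state-dict key format is an assumption for illustration only.

# Sketch: derive "lora_prior_te_" LoRA key names for a CLIP text encoder.
# LORA_CLIP_MAP is an illustrative subset; the model-side key format is assumed.
LORA_CLIP_MAP = {
    "self_attn.q_proj": "self_attn_q_proj",
    "mlp.fc1": "mlp_fc1",
}

def prior_te_key_map(num_layers=2):
    key_map = {}
    for b in range(num_layers):
        for c in LORA_CLIP_MAP:
            model_key = "clip_g.transformer.text_model.encoder.layers.{}.{}.weight".format(b, c)
            lora_key = "lora_prior_te_text_model_encoder_layers_{}_{}".format(b, LORA_CLIP_MAP[c])
            key_map[lora_key] = model_key
    return key_map

print(prior_te_key_map(1))
# {'lora_prior_te_text_model_encoder_layers_0_self_attn_q_proj':
#  'clip_g.transformer.text_model.encoder.layers.0.self_attn.q_proj.weight', ...}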

@@ -207,6 +209,7 @@ def model_lora_keys_unet(model, key_map={}):
         if k.startswith("diffusion_model.") and k.endswith(".weight"):
             key_lora = k[len("diffusion_model."):-len(".weight")].replace(".", "_")
             key_map["lora_unet_{}".format(key_lora)] = k
+            key_map["lora_prior_unet_{}".format(key_lora)] = k #cascade lora: TODO put lora key prefix in the model config
 
     diffusers_keys = comfy.utils.unet_to_diffusers(model.model_config.unet_config)
     for k in diffusers_keys:
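
And a sketch of how a key map like this could be consumed when a LoRA file is loaded. This is an illustration rather than ComfyUI's actual loader: the ".lora_up.weight"/".lora_down.weight" suffixes follow the common kohya-style file convention and the example tensor names are hypothetical. The point is that the "lora_prior_unet_"/"lora_prior_te_" prefixes simply let Cascade prior LoRA tensors resolve to the same underlying model weights.

# Sketch: apply LoRA deltas through a {lora name -> model weight key} map.
# Kohya-style ".lora_up.weight"/".lora_down.weight" suffixes are assumed.
import torch

def apply_lora(model_sd, lora_sd, key_map, strength=1.0):
    for lora_name, model_key in key_map.items():
        up = lora_sd.get(lora_name + ".lora_up.weight")
        down = lora_sd.get(lora_name + ".lora_down.weight")
        if up is None or down is None or model_key not in model_sd:
            continue
        # W' = W + strength * (up @ down), the standard low-rank LoRA update
        model_sd[model_key] = model_sd[model_key] + strength * (up @ down)
    return model_sd

# Hypothetical usage with a single 4x4 weight and a rank-2 LoRA:
model_sd = {"diffusion_model.some.weight": torch.zeros(4, 4)}
key_map = {"lora_prior_unet_some": "diffusion_model.some.weight"}
lora_sd = {
    "lora_prior_unet_some.lora_up.weight": torch.randn(4, 2),
    "lora_prior_unet_some.lora_down.weight": torch.randn(2, 4),
}
apply_lora(model_sd, lora_sd, key_map)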
