Added Civitai LoRAs conversion to PEFT, PEFT LoRAs conversion to webui #596

Merged · 2 commits · Jun 21, 2023
32 changes: 29 additions & 3 deletions examples/lora_dreambooth/convert_kohya_ss_sd_lora_to_peft.py
@@ -1,6 +1,7 @@
 import argparse
 import os
-from typing import List, Optional
+import re
+from typing import Callable, List, Optional, Union
 
 import safetensors
 import torch
@@ -49,6 +50,17 @@ def get_modules_names(
     return sorted(modules_names)
 
 
+def get_rank_alpha(
+    layer_names: List[str],
+    value_getter: Callable[[str], Union[int, float]],
+    filter_string: str,
+) -> Union[int, float]:
+    values = [value_getter(p) for p in filter(lambda x: bool(re.search(filter_string, x)), layer_names)]
+    value = values[0]
+    assert all(v == value for v in values), f"All LoRA ranks and alphas must be same, found: {values}"
+    return value
+
+
 if __name__ == "__main__":
     parser = argparse.ArgumentParser()
 
@@ -81,8 +93,22 @@ def get_modules_names(
     with safetensors.safe_open(args.kohya_lora_path, framework="pt", device="cpu") as f:
         # Extract information about LoRA structure
         metadata = f.metadata()
-        lora_r = lora_text_encoder_r = int(metadata["ss_network_dim"])
-        lora_alpha = lora_text_encoder_alpha = float(metadata["ss_network_alpha"])
+        if (metadata is not None) and ("ss_network_dim" in metadata) and ("ss_network_alpha" in metadata):
+            # LoRA rank and alpha are in safetensors metadata, just get it
+            lora_r = lora_text_encoder_r = int(metadata["ss_network_dim"])
+            lora_alpha = lora_text_encoder_alpha = float(metadata["ss_network_alpha"])
+        else:
+            # LoRA rank and alpha are not present, so infer them
+            lora_r = get_rank_alpha(
+                f.keys(), lambda n: f.get_tensor(n).size(0), f"^{LORA_PREFIX_UNET}\w+\.lora_down\.weight$"
+            )
+            lora_text_encoder_r = get_rank_alpha(
+                f.keys(), lambda n: f.get_tensor(n).size(0), f"^{LORA_PREFIX_TEXT_ENCODER}\w+\.lora_down\.weight$"
+            )
+            lora_alpha = get_rank_alpha(f.keys(), lambda n: f.get_tensor(n).item(), f"^{LORA_PREFIX_UNET}\w+\.alpha$")
+            lora_text_encoder_alpha = get_rank_alpha(
+                f.keys(), lambda n: f.get_tensor(n).item(), f"^{LORA_PREFIX_TEXT_ENCODER}\w+\.alpha$"
+            )
 
         # Create LoRA for text encoder
         text_encoder_config = LoraConfig(
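This is the Civitai-to-PEFT half of the change: when a kohya_ss/Civitai LoRA file ships without the ss_network_dim and ss_network_alpha safetensors metadata, the rank is recovered from the first dimension of the lora_down.weight tensors and alpha from the stored scalar alpha tensors, separately for the UNet and text-encoder prefixes. A minimal standalone sketch of the same idea (the file name is a placeholder, and it collapses both prefixes into a single rank/alpha instead of distinguishing them the way the script does):

# Sketch: recover LoRA rank and alpha from a kohya_ss-style safetensors file
# when the ss_network_dim / ss_network_alpha metadata entries are missing.
# "some_civitai_lora.safetensors" is a placeholder path.
import safetensors

with safetensors.safe_open("some_civitai_lora.safetensors", framework="pt", device="cpu") as f:
    # dimension 0 of every lora_down weight is the LoRA rank
    ranks = {f.get_tensor(k).size(0) for k in f.keys() if k.endswith("lora_down.weight")}
    # each LoRA module stores its alpha as a zero-dimensional tensor
    alphas = {f.get_tensor(k).item() for k in f.keys() if k.endswith(".alpha")}
    assert len(ranks) == 1 and len(alphas) == 1, "expected a single rank and alpha across modules"
    lora_r, lora_alpha = ranks.pop(), alphas.pop()
    print(lora_r, lora_alpha)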
97 changes: 97 additions & 0 deletions examples/lora_dreambooth/convert_peft_sd_lora_to_kohya_ss.py
@@ -0,0 +1,97 @@
import argparse
import os
from typing import Dict

import torch
from diffusers import UNet2DConditionModel
from safetensors.torch import save_file
from transformers import CLIPTextModel

from peft import PeftModel, get_peft_model_state_dict


# Default kohya_ss LoRA replacement modules
# https://github.com/kohya-ss/sd-scripts/blob/c924c47f374ac1b6e33e71f82948eb1853e2243f/networks/lora.py#L664
LORA_PREFIX_UNET = "lora_unet"
LORA_PREFIX_TEXT_ENCODER = "lora_te"
LORA_ADAPTER_NAME = "default"


def get_module_kohya_state_dict(module: PeftModel, prefix: str, dtype: torch.dtype) -> Dict[str, torch.Tensor]:
    kohya_ss_state_dict = {}
    for peft_key, weight in get_peft_model_state_dict(module).items():
        kohya_key = peft_key.replace("base_model.model", prefix)
        kohya_key = kohya_key.replace("lora_A", "lora_down")
        kohya_key = kohya_key.replace("lora_B", "lora_up")
        kohya_key = kohya_key.replace(".", "_", kohya_key.count(".") - 2)
        kohya_ss_state_dict[kohya_key] = weight.to(dtype)

        # Set alpha parameter
        if "lora_down" in kohya_key:
            alpha_key = f'{kohya_key.split(".")[0]}.alpha'
            kohya_ss_state_dict[alpha_key] = torch.tensor(module.peft_config[LORA_ADAPTER_NAME].lora_alpha).to(dtype)

    return kohya_ss_state_dict


if __name__ == "__main__":
    parser = argparse.ArgumentParser()

    parser.add_argument(
        "--sd_checkpoint",
        default=None,
        type=str,
        required=True,
        help="Path to pretrained model or model identifier from huggingface.co/models.",
    )

    parser.add_argument(
        "--sd_checkpoint_revision",
        type=str,
        default=None,
        required=False,
        help="Revision of pretrained model identifier from huggingface.co/models.",
    )

    parser.add_argument("--peft_lora_path", default=None, type=str, required=True, help="Path to peft trained LoRA")

    parser.add_argument(
        "--dump_path",
        default=None,
        type=str,
        required=True,
        help="Path to the output safetensors file for use with webui.",
    )

    parser.add_argument("--half", action="store_true", help="Save weights in half precision.")
    args = parser.parse_args()

    # Store kohya_ss state dict
    kohya_ss_state_dict = {}
    dtype = torch.float16 if args.half else torch.float32

    # Load Text Encoder LoRA model
    text_encoder_peft_lora_path = os.path.join(args.peft_lora_path, "text_encoder")
    if os.path.exists(text_encoder_peft_lora_path):
        text_encoder = CLIPTextModel.from_pretrained(
            args.sd_checkpoint, subfolder="text_encoder", revision=args.sd_checkpoint_revision
        )
        text_encoder = PeftModel.from_pretrained(
            text_encoder, text_encoder_peft_lora_path, adapter_name=LORA_ADAPTER_NAME
        )
        kohya_ss_state_dict.update(get_module_kohya_state_dict(text_encoder, LORA_PREFIX_TEXT_ENCODER, dtype))

    # Load UNet LoRA model
    unet_peft_lora_path = os.path.join(args.peft_lora_path, "unet")
    if os.path.exists(unet_peft_lora_path):
        unet = UNet2DConditionModel.from_pretrained(
            args.sd_checkpoint, subfolder="unet", revision=args.sd_checkpoint_revision
        )
        unet = PeftModel.from_pretrained(unet, unet_peft_lora_path, adapter_name=LORA_ADAPTER_NAME)
        kohya_ss_state_dict.update(get_module_kohya_state_dict(unet, LORA_PREFIX_UNET, dtype))

    # Save state dict
    save_file(
        kohya_ss_state_dict,
        args.dump_path,
    )
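In the PEFT-to-webui direction, get_module_kohya_state_dict renames each PEFT LoRA key into the kohya_ss naming that webui expects: base_model.model becomes the lora_unet or lora_te prefix, lora_A / lora_B become lora_down / lora_up, every dot except the last two turns into an underscore, and one alpha entry per module is added from the adapter's lora_alpha. A small worked example of that renaming (the input key is illustrative, not taken from a particular checkpoint):

# Worked example of the key renaming performed by get_module_kohya_state_dict.
# The PEFT key below is illustrative.
peft_key = "base_model.model.text_model.encoder.layers.0.self_attn.q_proj.lora_A.weight"
kohya_key = peft_key.replace("base_model.model", "lora_te")
kohya_key = kohya_key.replace("lora_A", "lora_down")
kohya_key = kohya_key.replace("lora_B", "lora_up")
# turn all but the last two dots into underscores
kohya_key = kohya_key.replace(".", "_", kohya_key.count(".") - 2)
print(kohya_key)  # lora_te_text_model_encoder_layers_0_self_attn_q_proj.lora_down.weight

With the arguments defined above, a hypothetical invocation would look like: python convert_peft_sd_lora_to_kohya_ss.py --sd_checkpoint <base model id or path> --peft_lora_path <PEFT LoRA directory> --dump_path lora_for_webui.safetensors --half. The result is a single safetensors file in the format webui loads as a LoRA.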