From 56c98402bedaa06958f47e72f0da2a5da84fb508 Mon Sep 17 00:00:00 2001
From: Kohya S
Date: Fri, 10 Mar 2023 20:30:11 +0900
Subject: [PATCH 1/2] fix bf16 model , conv2d 3x3, >320 dim close #127

---
 README.md               |  7 +++++++
 scripts/lora_compvis.py | 12 ++++++------
 2 files changed, 13 insertions(+), 6 deletions(-)

diff --git a/README.md b/README.md
index 35a3613..5095e90 100644
--- a/README.md
+++ b/README.md
@@ -160,6 +160,13 @@ Composable LoRA はサブプロンプトごとに LoRA の適用有無を切り
 
 ## Change History
 
+- 9 Mar. 2023, 2023/3/9: Release v0.5.1
+  - Fix the model saved with `bf16` causes an error. https://github.com/kohya-ss/sd-webui-additional-networks/issues/127
+  - Fix some Conv2d-3x3 LoRA modules are not effective. https://github.com/kohya-ss/sd-scripts/issues/275
+  - Fix LoRA modules with higher dim (rank) > 320 causes an error.
+  - `bf16` で学習されたモデルが読み込めない不具合を修正しました。 https://github.com/kohya-ss/sd-webui-additional-networks/issues/127
+  - いくつかの Conv2d-3x3 LoRA モジュールが有効にならない不具合を修正しました。 https://github.com/kohya-ss/sd-scripts/issues/275
+  - dim (rank) が 320 を超えるLoRAモデルが読み込めない不具合を修正しました。
 - 8 Mar. 2023, 2023/3/8: Release v0.5.0
   - Support current version of [LoCon](https://github.com/KohakuBlueleaf/LoCon). __Thank you very much KohakuBlueleaf for your help!__
   - LoCon will be enhanced in the future. Compatibility for future versions is not guaranteed.
diff --git a/scripts/lora_compvis.py b/scripts/lora_compvis.py
index a879cfc..ef361bc 100644
--- a/scripts/lora_compvis.py
+++ b/scripts/lora_compvis.py
@@ -34,9 +34,9 @@ def __init__(self, lora_name, org_module: torch.nn.Module, multiplier=1.0, lora_
       in_dim = org_module.in_channels
       out_dim = org_module.out_channels
 
-      self.lora_dim = min(self.lora_dim, in_dim, out_dim)
-      if self.lora_dim != lora_dim:
-        print(f"{lora_name} dim (rank) is changed to: {self.lora_dim}")
+      # self.lora_dim = min(self.lora_dim, in_dim, out_dim)
+      # if self.lora_dim != lora_dim:
+      #   print(f"{lora_name} dim (rank) is changed to: {self.lora_dim}")
 
       kernel_size = org_module.kernel_size
       stride = org_module.stride
@@ -128,7 +128,7 @@ def create_network_and_apply_compvis(du_state_dict, multiplier_tenc, multiplier_
 
     lora_name = key.split('.')[0]
     if 'alpha' in key:
-      modules_alpha[lora_name] = float(value.detach().cpu().numpy())
+      modules_alpha[lora_name] = float(value.detach().to(torch.float).cpu().numpy())
     elif 'lora_down' in key:
       dim = value.size()[0]
       modules_dim[lora_name] = dim
@@ -346,7 +346,7 @@ def create_modules(prefix, root_module: torch.nn.Module, target_replace_modules,
               if '_resblocks_23_' in lora_name:  # ignore last block in StabilityAi Text Encoder
                 break
               if lora_name not in comp_vis_loras_dim_alpha:
-                break
+                continue
 
               dim, alpha = comp_vis_loras_dim_alpha[lora_name]
               lora = LoRAModule(lora_name, child_module, multiplier, dim, alpha)
@@ -363,7 +363,7 @@ def create_modules(prefix, root_module: torch.nn.Module, target_replace_modules,
 
                 lora_name = module_name + '_' + suffix
                 if lora_name not in comp_vis_loras_dim_alpha:
-                  break
+                  continue
                 dim, alpha = comp_vis_loras_dim_alpha[lora_name]
                 lora_info = LoRAInfo(lora_name, module_name, child_module, multiplier, dim, alpha)
                 loras.append(lora_info)

From 4ba0a58100825653956b6b4df9497f1f4e556771 Mon Sep 17 00:00:00 2001
From: Kohya S
Date: Fri, 10 Mar 2023 20:36:25 +0900
Subject: [PATCH 2/2] clarify code

---
 scripts/lora_compvis.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/scripts/lora_compvis.py b/scripts/lora_compvis.py
index ef361bc..5a89c0c 100644
--- a/scripts/lora_compvis.py
+++ b/scripts/lora_compvis.py
@@ -46,12 +46,12 @@ def __init__(self, lora_name, org_module: torch.nn.Module, multiplier=1.0, lora_
     else:
       in_dim = org_module.in_features
       out_dim = org_module.out_features
-      self.lora_down = torch.nn.Linear(in_dim, lora_dim, bias=False)
-      self.lora_up = torch.nn.Linear(lora_dim, out_dim, bias=False)
+      self.lora_down = torch.nn.Linear(in_dim, self.lora_dim, bias=False)
+      self.lora_up = torch.nn.Linear(self.lora_dim, out_dim, bias=False)
 
     if type(alpha) == torch.Tensor:
       alpha = alpha.detach().float().numpy()  # without casting, bf16 causes error
-    alpha = lora_dim if alpha is None or alpha == 0 else alpha
+    alpha = self.lora_dim if alpha is None or alpha == 0 else alpha
     self.scale = alpha / self.lora_dim
     self.register_buffer('alpha', torch.tensor(alpha))  # 定数として扱える
 
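
Note on the `bf16` hunk in `create_network_and_apply_compvis`: NumPy has no `bfloat16` dtype, so calling `.numpy()` on a `bf16` tensor raises a `TypeError`; casting to `torch.float` first, as the patch does, avoids it. The snippet below is a minimal stand-alone sketch of that behaviour, not part of the patch; the value `4.0` is only an illustrative alpha.

```python
import torch

# An alpha value as it might be stored in a LoRA checkpoint saved with bf16.
alpha = torch.tensor(4.0, dtype=torch.bfloat16)

try:
    # Pre-patch conversion: NumPy cannot represent bfloat16, so this raises TypeError.
    float(alpha.detach().cpu().numpy())
except TypeError as err:
    print(f"bf16 -> numpy fails: {err}")

# Patched conversion: cast to float32 before handing the tensor to NumPy.
print(float(alpha.detach().to(torch.float).cpu().numpy()))  # 4.0
```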