
Commit b66dfb5

Bias backup (AUTOMATIC1111#7)
* Prevent unnecessary bias backup
* Fix LoRA bias error

Co-authored-by: AUTOMATIC1111 <16777216c@gmail.com>
1 parent 5b49881 commit b66dfb5

File tree

1 file changed (+6, -1 lines)


extensions-builtin/Lora/networks.py

@@ -378,13 +378,18 @@ def network_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn
         self.network_weights_backup = weights_backup
 
     bias_backup = getattr(self, "network_bias_backup", None)
-    if bias_backup is None:
+    if bias_backup is None and wanted_names != ():
         if isinstance(self, torch.nn.MultiheadAttention) and self.out_proj.bias is not None:
             bias_backup = self.out_proj.bias.to(devices.cpu, copy=True)
         elif getattr(self, 'bias', None) is not None:
             bias_backup = self.bias.to(devices.cpu, copy=True)
         else:
             bias_backup = None
+
+        # Unlike weight which always has value, some modules don't have bias.
+        # Only report if bias is not None and current bias are not unchanged.
+        if bias_backup is not None and current_names != ():
+            raise RuntimeError("no backup bias found and current bias are not unchanged")
     self.network_bias_backup = bias_backup
 
     if current_names != wanted_names:
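
In short: before this change, a bias snapshot was taken whenever network_bias_backup was empty, even when no networks were wanted (wanted_names == ()), and if a network had already been applied (current_names != ()) that snapshot would capture an already-modified bias. Below is a minimal sketch of the added guard as a standalone helper, assuming a plain module whose bias lives at .bias; the backup_bias name and the toy usage are hypothetical, not the repository's API, and the real code additionally special-cases torch.nn.MultiheadAttention.

import torch

def backup_bias(module, current_names, wanted_names):
    # Hypothetical helper restating the commit's guard; in the repository
    # this logic lives inline in network_apply_weights.
    bias_backup = getattr(module, "network_bias_backup", None)
    if bias_backup is None and wanted_names != ():
        # Unlike weight, bias may legitimately be absent
        # (e.g. torch.nn.Linear(..., bias=False)), so only a non-None
        # snapshot can signal an error below.
        bias = getattr(module, "bias", None)
        bias_backup = bias.to("cpu", copy=True) if bias is not None else None

        # If a network was already applied (current_names != ()), the bias
        # just copied is no longer the original and must not become the backup.
        if bias_backup is not None and current_names != ():
            raise RuntimeError("no backup bias found and current bias are not unchanged")

    module.network_bias_backup = bias_backup


# Toy usage: nothing is backed up while no networks are wanted, and a backup
# taken before any network is applied captures the original bias.
layer = torch.nn.Linear(4, 4)
backup_bias(layer, current_names=(), wanted_names=())           # no-op
assert layer.network_bias_backup is None
backup_bias(layer, current_names=(), wanted_names=("lora_a",))  # snapshot
assert torch.equal(layer.network_bias_backup, layer.bias.detach().cpu())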
